From a2d95b62c19b497d7184af177c84420d3e54ac52 Mon Sep 17 00:00:00 2001 From: Qin Yu Date: Thu, 26 Sep 2024 21:01:54 +0200 Subject: [PATCH 01/32] refactor(zoo): show `nickname` and `name` for bioimage.io models --- plantseg/core/zoo.py | 27 +++++++++++++++++++-------- 1 file changed, 19 insertions(+), 8 deletions(-) diff --git a/plantseg/core/zoo.py b/plantseg/core/zoo.py index fb2d1fc1..dbd3164f 100644 --- a/plantseg/core/zoo.py +++ b/plantseg/core/zoo.py @@ -1,5 +1,7 @@ """Model Zoo Singleton""" +# pylint: disable=C0116,C0103 + import json import logging from enum import Enum @@ -392,15 +394,24 @@ def refresh_bioimageio_zoo_urls(self): collection_path = Path(pooch.retrieve(BIOIMAGE_IO_COLLECTION_URL, known_hash=None)) with collection_path.open(encoding='utf-8') as f: collection = json.load(f) + + max_nickname_length = max( # Find the longest nickname for formatting + len(entry["nickname"]) for entry in collection["collection"] if entry["type"] == "model" + ) + + def truncate_name(name, length=50): + return (name[:length] + '...') if len(name) > length else name + + def build_model_url_dict(filter_func): + return { + f"{entry['nickname']:<{max_nickname_length}}: {truncate_name(entry['name'])}": entry["rdf_source"] + for entry in collection["collection"] + if entry["type"] == "model" and filter_func(entry) + } + self._bioimageio_zoo_collection = collection - self._bioimageio_zoo_all_model_url_dict = { - entry["nickname"]: entry["rdf_source"] for entry in collection["collection"] if entry["type"] == "model" - } - self._bioimageio_zoo_plantseg_model_url_dict = { - entry["nickname"]: entry["rdf_source"] - for entry in collection["collection"] - if entry["type"] == "model" and self._is_plantseg_model(entry) - } + self._bioimageio_zoo_all_model_url_dict = build_model_url_dict(lambda entry: True) + self._bioimageio_zoo_plantseg_model_url_dict = build_model_url_dict(self._is_plantseg_model) def _is_plantseg_model(self, collection_entry: dict) -> bool: """Determines if the 'tags' field in a collection entry contains the keyword 'plantseg'.""" From 33b6c3925f50ec68ec59ed637ac25fc44010b789 Mon Sep 17 00:00:00 2001 From: Qin Yu Date: Thu, 26 Sep 2024 21:07:31 +0200 Subject: [PATCH 02/32] refactor(zoo): improve code --- plantseg/core/zoo.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/plantseg/core/zoo.py b/plantseg/core/zoo.py index dbd3164f..4dea366e 100644 --- a/plantseg/core/zoo.py +++ b/plantseg/core/zoo.py @@ -395,22 +395,21 @@ def refresh_bioimageio_zoo_urls(self): with collection_path.open(encoding='utf-8') as f: collection = json.load(f) - max_nickname_length = max( # Find the longest nickname for formatting - len(entry["nickname"]) for entry in collection["collection"] if entry["type"] == "model" - ) + models = [entry for entry in collection["collection"] if entry["type"] == "model"] + max_nickname_length = max(len(entry["nickname"]) for entry in models) - def truncate_name(name, length=50): - return (name[:length] + '...') if len(name) > length else name + def truncate_name(name, length=40): + return name[:length] + '...' 
if len(name) > length else name - def build_model_url_dict(filter_func): + def build_model_url_dict(filter_func=None): + filtered_models = filter(filter_func, models) if filter_func else models return { f"{entry['nickname']:<{max_nickname_length}}: {truncate_name(entry['name'])}": entry["rdf_source"] - for entry in collection["collection"] - if entry["type"] == "model" and filter_func(entry) + for entry in filtered_models } self._bioimageio_zoo_collection = collection - self._bioimageio_zoo_all_model_url_dict = build_model_url_dict(lambda entry: True) + self._bioimageio_zoo_all_model_url_dict = build_model_url_dict() self._bioimageio_zoo_plantseg_model_url_dict = build_model_url_dict(self._is_plantseg_model) def _is_plantseg_model(self, collection_entry: dict) -> bool: From be666b23e9dec413554c5dfb72c3feeefdb427e2 Mon Sep 17 00:00:00 2001 From: Qin Yu Date: Thu, 26 Sep 2024 21:09:32 +0200 Subject: [PATCH 03/32] gui(zoo): set model choices font --- plantseg/viewer_napari/containers.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/plantseg/viewer_napari/containers.py b/plantseg/viewer_napari/containers.py index 7f8b0429..bb4fd1f3 100644 --- a/plantseg/viewer_napari/containers.py +++ b/plantseg/viewer_napari/containers.py @@ -1,4 +1,5 @@ from magicgui.widgets import Container +from qtpy.QtGui import QFont from plantseg.viewer_napari.widgets import ( widget_add_custom_model, @@ -27,6 +28,7 @@ ) STYLE_SLIDER = "font-size: 9pt;" +MONOSPACE_FONT = QFont("Courier New", 9) # "Courier New" is a common monospaced font def get_data_io_tab(): @@ -57,6 +59,7 @@ def get_preprocessing_tab(): def get_segmentation_tab(): + widget_unet_prediction.model_id.native.setFont(MONOSPACE_FONT) container = Container( widgets=[ widget_unet_prediction, From ea484c72267c24d649693fb7470014366bf639f7 Mon Sep 17 00:00:00 2001 From: Qin Yu Date: Thu, 26 Sep 2024 21:16:30 +0200 Subject: [PATCH 04/32] gui: set fixed width for widget dock --- plantseg/core/zoo.py | 2 +- plantseg/viewer_napari/viewer.py | 3 ++- plantseg/viewer_napari/widgets/prediction.py | 1 - 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/plantseg/core/zoo.py b/plantseg/core/zoo.py index 4dea366e..f7ec4e20 100644 --- a/plantseg/core/zoo.py +++ b/plantseg/core/zoo.py @@ -398,7 +398,7 @@ def refresh_bioimageio_zoo_urls(self): models = [entry for entry in collection["collection"] if entry["type"] == "model"] max_nickname_length = max(len(entry["nickname"]) for entry in models) - def truncate_name(name, length=40): + def truncate_name(name, length=100): return name[:length] + '...' 
if len(name) > length else name
 
         def build_model_url_dict(filter_func=None):
diff --git a/plantseg/viewer_napari/viewer.py b/plantseg/viewer_napari/viewer.py
index 02241170..9704e083 100644
--- a/plantseg/viewer_napari/viewer.py
+++ b/plantseg/viewer_napari/viewer.py
@@ -25,7 +25,8 @@ def run_viewer():
         (get_proofreading_tab(), 'Proofreading'),
         (get_extras_tab(), 'Models'),
     ]:
-        viewer.window.add_dock_widget(_containers, name=name, tabify=True)
+        this_widget = viewer.window.add_dock_widget(_containers, name=name, tabify=True)
+        this_widget.setFixedWidth(666)
 
     # Show data tab by default
     viewer.window._dock_widgets['Input/Output'].show()
diff --git a/plantseg/viewer_napari/widgets/prediction.py b/plantseg/viewer_napari/widgets/prediction.py
index 336a7167..8718f8d6 100644
--- a/plantseg/viewer_napari/widgets/prediction.py
+++ b/plantseg/viewer_napari/widgets/prediction.py
@@ -5,7 +5,6 @@
 from pathlib import Path
 from typing import Optional
 
-import napari
 import torch.cuda
 from magicgui import magicgui
 from magicgui.types import Separator

From e66fb1fbeb13af4ae5c899560e7822fb0a69f66c Mon Sep 17 00:00:00 2001
From: Qin Yu
Date: Tue, 26 Nov 2024 18:15:40 +0100
Subject: [PATCH 05/32] chore(macOS): development environment for Apple Silicon

---
 .gitignore                 |  3 +++
 environment-dev-apple.yaml | 46 ++++++++++++++++++++++++++++++++++++++
 2 files changed, 49 insertions(+)
 create mode 100755 environment-dev-apple.yaml

diff --git a/.gitignore b/.gitignore
index f5f1aa4d..0c3c4204 100644
--- a/.gitignore
+++ b/.gitignore
@@ -14,3 +14,6 @@ docs/_build/
 
 # Codecov
 .coverage
+
+# macOS
+.DS_Store
diff --git a/environment-dev-apple.yaml b/environment-dev-apple.yaml
new file mode 100755
index 00000000..4e99a587
--- /dev/null
+++ b/environment-dev-apple.yaml
@@ -0,0 +1,46 @@
+name: plant-seg-dev
+channels:
+  - pytorch
+  - conda-forge
+  # `defaults` is optional, unless e.g. `conda-forge` has no cudnn 9.* when `defaults` has.
+  # `defaults` of Anaconda is not accessible for many non-profit institutes such as EMBL.
+  # - defaults
+dependencies:
+  - python
+  # Neural Network and GPU
+  - pytorch::pytorch
+  - torchvision
+  # Bioimage and CV
+  - tifffile
+  - h5py
+  - zarr
+  - vigra
+  - python-elf
+  - python-graphviz
+  - scikit-image
+  - bioimageio.core>=0.6.5
+  # GUI
+  - pyqt
+  - napari
+  # Other
+  - requests
+  - pyyaml
+  - pydantic>2,<2.10 # 2.10 causes problems, see spec-bioimage-io/issues/663
+  # Test
+  - pytest
+  - pytest-qt
+  - pytest-mock
+  - requests-mock
+  # CI/CD
+  - pre-commit
+  - bump-my-version
+  # Docs
+  - mkdocs-material
+  - mkdocs-autorefs
+  - mkdocs-git-revision-date-localized-plugin
+  - mkdocs-git-committers-plugin-2
+  - mkdocstrings-python
+  - pip:
+    - markdown-exec
+  # PlantSeg
+  - -e .
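The formatting that PATCH 01-04 converge on pads every BioImage.IO nickname to the longest nickname in the collection and truncates the free-form model name, so the monospaced dropdown font set in PATCH 03 lines the two columns up. A minimal, self-contained sketch of the helpers inside `refresh_bioimageio_zoo_urls` (only 'efficient-chipmunk' and its name are taken from the real collection; 'hypothetical-newt' and its long name are made-up placeholders):

def truncate_name(name: str, length: int = 100) -> str:
    return name[:length] + '...' if len(name) > length else name

entries = [
    {"nickname": "efficient-chipmunk", "name": "PlantSeg Plant Nuclei 3D UNet"},
    {"nickname": "hypothetical-newt", "name": "A deliberately long placeholder model name that will be cut off"},
]
max_nickname_length = max(len(entry["nickname"]) for entry in entries)
for entry in entries:
    print(f"{entry['nickname']:<{max_nickname_length}}: {truncate_name(entry['name'], length=40)}")
# efficient-chipmunk: PlantSeg Plant Nuclei 3D UNet
# hypothetical-newt : A deliberately long placeholder model na...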
From 38f993a3951c672aa88a4c74b71169856cc34ddd Mon Sep 17 00:00:00 2001 From: Qin Yu Date: Tue, 26 Nov 2024 21:50:44 +0100 Subject: [PATCH 06/32] fix(gui): always filters under mode --- plantseg/viewer_napari/widgets/prediction.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plantseg/viewer_napari/widgets/prediction.py b/plantseg/viewer_napari/widgets/prediction.py index e68ad581..3735db6b 100644 --- a/plantseg/viewer_napari/widgets/prediction.py +++ b/plantseg/viewer_napari/widgets/prediction.py @@ -128,8 +128,8 @@ def to_choices(cls): ) def widget_unet_prediction( image: Image, - plantseg_filter: bool = True, mode: UNetPredictionMode = UNetPredictionMode.PLANTSEG, + plantseg_filter: bool = True, model_name: Optional[str] = None, model_id: Optional[str] = None, device: str = ALL_DEVICES[0], From 841ea4d6a54f6ef369e2d03c3db3d1cf112b76f3 Mon Sep 17 00:00:00 2001 From: Qin Yu Date: Tue, 3 Dec 2024 01:30:17 +0100 Subject: [PATCH 07/32] refactor(zoo): use `ModelZoo.models_bioimageio` --- plantseg/core/zoo.py | 36 +++++++++++++++++++++++++++++++++--- 1 file changed, 33 insertions(+), 3 deletions(-) diff --git a/plantseg/core/zoo.py b/plantseg/core/zoo.py index f7ec4e20..f1d64d82 100644 --- a/plantseg/core/zoo.py +++ b/plantseg/core/zoo.py @@ -50,7 +50,6 @@ class ModelZooRecord(BaseModel): name: str url: Optional[str] = Field(None, validation_alias=AliasChoices('model_url', 'url')) path: Optional[str] = None - id: Optional[str] = None description: Optional[str] = None resolution: Optional[tuple[float, float, float]] = None dimensionality: Optional[str] = None @@ -60,11 +59,25 @@ class ModelZooRecord(BaseModel): doi: Optional[str] = None added_by: Optional[str] = None + # BioImage.IO models specific fields. TODO: unify. + id: Optional[str] = None + name_display: Optional[str] = None + rdf_source: Optional[str] = None + supported: Optional[bool] = None + @model_validator(mode='after') def check_one_id_present(self) -> Self: """Check that one of url (zenodo), path (custom/local) or id (bioimage.io) is present""" if self.url is None and self.path is None and self.id is None: - raise ValueError(f'One of url, path or id must be present: {self}') + raise ValueError(f'One of `url`, `path` or `id` must be present: {self}') + return self + + @model_validator(mode='after') + def check_id_fields_present(self) -> Self: + if self.id is not None and (self.name_display is None or self.rdf_source is None or self.supported is None): + raise ValueError( + f'If `id` exists, then `name_display`, `rdf_source` and `supported` must be present: {self}' + ) return self @@ -384,6 +397,16 @@ def get_model_by_id(self, model_id: str): logger_zoo.info(f"Loaded model from BioImage.IO Model Zoo: {model_id}") return model, model_config, model_weights_path + def _init_bioimageio_zoo_df(self) -> None: + records = [] + for _, model in self._bioimageio_zoo_all_model_url_dict.items(): + records.append(ModelZooRecord(**model, added_by=Author.BIOIMAGEIO).model_dump()) + + self.models_bioimageio = DataFrame( + records, + columns=list(ModelZooRecord.model_fields.keys()), + ).set_index('id') + def refresh_bioimageio_zoo_urls(self): """Initialize the BioImage.IO Model Zoo collection and URL dictionaries. 
@@ -404,13 +427,20 @@ def truncate_name(name, length=100): def build_model_url_dict(filter_func=None): filtered_models = filter(filter_func, models) if filter_func else models return { - f"{entry['nickname']:<{max_nickname_length}}: {truncate_name(entry['name'])}": entry["rdf_source"] + entry['name']: { + "id": entry["nickname"], + "name": entry["name"], + "name_display": f"{entry['nickname']:<{max_nickname_length}}: {truncate_name(entry['name'])}", + "rdf_source": entry["rdf_source"], + "supported": self._is_plantseg_model(entry), + } for entry in filtered_models } self._bioimageio_zoo_collection = collection self._bioimageio_zoo_all_model_url_dict = build_model_url_dict() self._bioimageio_zoo_plantseg_model_url_dict = build_model_url_dict(self._is_plantseg_model) + self._init_bioimageio_zoo_df() def _is_plantseg_model(self, collection_entry: dict) -> bool: """Determines if the 'tags' field in a collection entry contains the keyword 'plantseg'.""" From 46736a20585a50fbb4891a0280be34698528f495 Mon Sep 17 00:00:00 2001 From: Qin Yu Date: Tue, 3 Dec 2024 02:17:38 +0100 Subject: [PATCH 08/32] test(zoo): `ModelZoo.models_bioimageio` is manually initialised --- plantseg/core/zoo.py | 2 ++ tests/core/test_zoo.py | 6 ++++++ 2 files changed, 8 insertions(+) diff --git a/plantseg/core/zoo.py b/plantseg/core/zoo.py index f1d64d82..5274ebed 100644 --- a/plantseg/core/zoo.py +++ b/plantseg/core/zoo.py @@ -412,6 +412,8 @@ def refresh_bioimageio_zoo_urls(self): The BioImage.IO Model Zoo collection is not downloaded during ModelZoo initialization to avoid unnecessary network requests. This method downloads the collection and extracts the model URLs for all models. + + Note that `models_bioimageio` doesn't exist until this method is called. """ logger_zoo.info(f"Fetching BioImage.IO Model Zoo collection from {BIOIMAGE_IO_COLLECTION_URL}") collection_path = Path(pooch.retrieve(BIOIMAGE_IO_COLLECTION_URL, known_hash=None)) diff --git a/tests/core/test_zoo.py b/tests/core/test_zoo.py index 1f5dc757..2e0788a2 100644 --- a/tests/core/test_zoo.py +++ b/tests/core/test_zoo.py @@ -62,3 +62,9 @@ def test_halo_computation_for_bioimageio_model(self, model_id): model, _, _ = model_zoo.get_model_by_id(model_id) halo = model_zoo.compute_halo(model) assert halo == 44 + + def test_models_bioimageio(self): + """`model_zoo` has no `models_bioimageio` attribute until `.refresh_bioimageio_zoo_urls()` is called.""" + assert not hasattr(model_zoo, 'models_bioimageio') + model_zoo.refresh_bioimageio_zoo_urls() + assert hasattr(model_zoo, 'models_bioimageio') From 1e119bf23e57fae13f237122559990458ebed57b Mon Sep 17 00:00:00 2001 From: Qin Yu Date: Tue, 3 Dec 2024 02:19:46 +0100 Subject: [PATCH 09/32] refactor(zoo): use `.models_bioimageio` DataFrame instead of dict --- plantseg/core/zoo.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/plantseg/core/zoo.py b/plantseg/core/zoo.py index 5274ebed..3916d7b3 100644 --- a/plantseg/core/zoo.py +++ b/plantseg/core/zoo.py @@ -97,7 +97,6 @@ class ModelZoo: _zoo_custom_dict: dict = {} _bioimageio_zoo_collection: dict = {} _bioimageio_zoo_all_model_url_dict: dict = {} - _bioimageio_zoo_plantseg_model_url_dict: dict = {} path_zoo: Path = PATH_MODEL_ZOO path_zoo_custom: Path = PATH_MODEL_ZOO_CUSTOM @@ -343,13 +342,13 @@ def get_model_by_id(self, model_id: str): https://bioimage-io.github.io/collection-bioimage-io/rdfs/10.5281/zenodo.8401064/8429203/rdf.yaml """ - if not self._bioimageio_zoo_all_model_url_dict: + if not self.models_bioimageio: 
self.refresh_bioimageio_zoo_urls() - if model_id not in self._bioimageio_zoo_all_model_url_dict: + if model_id not in self.models_bioimageio.index: raise ValueError(f"Model ID {model_id} not found in BioImage.IO Model Zoo") - rdf_url = self._bioimageio_zoo_all_model_url_dict[model_id] + rdf_url = self.models_bioimageio.at[model_id, 'rdf_source'] model_description = load_description(rdf_url) # Check if description is `ResourceDescr` @@ -441,7 +440,6 @@ def build_model_url_dict(filter_func=None): self._bioimageio_zoo_collection = collection self._bioimageio_zoo_all_model_url_dict = build_model_url_dict() - self._bioimageio_zoo_plantseg_model_url_dict = build_model_url_dict(self._is_plantseg_model) self._init_bioimageio_zoo_df() def _is_plantseg_model(self, collection_entry: dict) -> bool: @@ -458,15 +456,15 @@ def _is_plantseg_model(self, collection_entry: dict) -> bool: def get_bioimageio_zoo_plantseg_model_names(self) -> list[str]: """Return a list of model names in the BioImage.IO Model Zoo tagged with 'plantseg'.""" - if not self._bioimageio_zoo_plantseg_model_url_dict: + if not self.models_bioimageio: self.refresh_bioimageio_zoo_urls() - return sorted(list(self._bioimageio_zoo_plantseg_model_url_dict.keys())) + return sorted(model_zoo.models_bioimageio[model_zoo.models_bioimageio["supported"]].index.to_list()) def get_bioimageio_zoo_all_model_names(self) -> list[str]: """Return a list of all model names in the BioImage.IO Model Zoo.""" - if not self._bioimageio_zoo_all_model_url_dict: + if not self.models_bioimageio: self.refresh_bioimageio_zoo_urls() - return sorted(list(self._bioimageio_zoo_all_model_url_dict.keys())) + return sorted(model_zoo.models_bioimageio.index.to_list()) def get_bioimageio_zoo_other_model_names(self) -> list[str]: """Return a list of model names in the BioImage.IO Model Zoo not tagged with 'plantseg'.""" From d204ee74e95fb5027146454fca44ea0961e6e6cb Mon Sep 17 00:00:00 2001 From: Qin Yu Date: Tue, 3 Dec 2024 02:20:45 +0100 Subject: [PATCH 10/32] refactor(zoo): use `pydantic.HttpUrl` for URLs --- plantseg/core/zoo.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/plantseg/core/zoo.py b/plantseg/core/zoo.py index 3916d7b3..9e1699fc 100644 --- a/plantseg/core/zoo.py +++ b/plantseg/core/zoo.py @@ -16,7 +16,7 @@ from bioimageio.spec.model.v0_5 import ModelDescr as ModelDescr_v0_5 from bioimageio.spec.utils import download from pandas import DataFrame, concat -from pydantic import AliasChoices, BaseModel, Field, model_validator +from pydantic import AliasChoices, BaseModel, Field, HttpUrl, model_validator from torch.nn import Conv2d, Conv3d, MaxPool2d, MaxPool3d, Module from plantseg import ( @@ -48,7 +48,7 @@ class ModelZooRecord(BaseModel): """Model Zoo Record""" name: str - url: Optional[str] = Field(None, validation_alias=AliasChoices('model_url', 'url')) + url: Optional[HttpUrl] = Field(None, validation_alias=AliasChoices('model_url', 'url')) path: Optional[str] = None description: Optional[str] = None resolution: Optional[tuple[float, float, float]] = None @@ -62,7 +62,7 @@ class ModelZooRecord(BaseModel): # BioImage.IO models specific fields. TODO: unify. 
id: Optional[str] = None name_display: Optional[str] = None - rdf_source: Optional[str] = None + rdf_source: Optional[HttpUrl] = None supported: Optional[bool] = None @model_validator(mode='after') @@ -374,6 +374,8 @@ def get_model_by_id(self, model_id: str): elif isinstance(model_description, ModelDescr_v0_5): # then it is `ArchitectureDescr` with `callable` architecture_callable = model_description.weights.pytorch_state_dict.architecture.callable architecture_kwargs = model_description.weights.pytorch_state_dict.architecture.kwargs + else: + raise ValueError(f"Unsupported model description format: {type(model_description).__name__}") logger_zoo.info(f"Got {architecture_callable} model with kwargs {architecture_kwargs}.") # Create model from architecture and kwargs From 2cd47a42d56019ba6a3c6b6976b9861f20a89f29 Mon Sep 17 00:00:00 2001 From: Qin Yu Date: Tue, 3 Dec 2024 02:30:00 +0100 Subject: [PATCH 11/32] fix(zoo): fix bioimageio model zoo df initialisation --- plantseg/core/zoo.py | 6 +++--- tests/core/test_zoo.py | 8 ++------ 2 files changed, 5 insertions(+), 9 deletions(-) diff --git a/plantseg/core/zoo.py b/plantseg/core/zoo.py index 9e1699fc..2e0bffb6 100644 --- a/plantseg/core/zoo.py +++ b/plantseg/core/zoo.py @@ -342,7 +342,7 @@ def get_model_by_id(self, model_id: str): https://bioimage-io.github.io/collection-bioimage-io/rdfs/10.5281/zenodo.8401064/8429203/rdf.yaml """ - if not self.models_bioimageio: + if not hasattr(self, 'models_bioimageio'): self.refresh_bioimageio_zoo_urls() if model_id not in self.models_bioimageio.index: @@ -458,13 +458,13 @@ def _is_plantseg_model(self, collection_entry: dict) -> bool: def get_bioimageio_zoo_plantseg_model_names(self) -> list[str]: """Return a list of model names in the BioImage.IO Model Zoo tagged with 'plantseg'.""" - if not self.models_bioimageio: + if not hasattr(self, 'models_bioimageio'): self.refresh_bioimageio_zoo_urls() return sorted(model_zoo.models_bioimageio[model_zoo.models_bioimageio["supported"]].index.to_list()) def get_bioimageio_zoo_all_model_names(self) -> list[str]: """Return a list of all model names in the BioImage.IO Model Zoo.""" - if not self.models_bioimageio: + if not hasattr(self, 'models_bioimageio'): self.refresh_bioimageio_zoo_urls() return sorted(model_zoo.models_bioimageio.index.to_list()) diff --git a/tests/core/test_zoo.py b/tests/core/test_zoo.py index 2e0788a2..8a1b92f5 100644 --- a/tests/core/test_zoo.py +++ b/tests/core/test_zoo.py @@ -47,6 +47,8 @@ def test_model_output_normalisation(self, model_name): class TestBioImageIOModelZoo: """Test the BioImage.IO model zoo""" + model_zoo.refresh_bioimageio_zoo_urls() + @pytest.mark.parametrize("model_id", MODEL_IDS) def test_get_model_by_id(self, model_id): """Try to load a model from the BioImage.IO model zoo by ID.""" @@ -62,9 +64,3 @@ def test_halo_computation_for_bioimageio_model(self, model_id): model, _, _ = model_zoo.get_model_by_id(model_id) halo = model_zoo.compute_halo(model) assert halo == 44 - - def test_models_bioimageio(self): - """`model_zoo` has no `models_bioimageio` attribute until `.refresh_bioimageio_zoo_urls()` is called.""" - assert not hasattr(model_zoo, 'models_bioimageio') - model_zoo.refresh_bioimageio_zoo_urls() - assert hasattr(model_zoo, 'models_bioimageio') From 365649614d918a8df3084efadd7a7ab8b0557493 Mon Sep 17 00:00:00 2001 From: Qin Yu Date: Thu, 5 Dec 2024 18:02:02 +0100 Subject: [PATCH 12/32] fix(zoo): update bioimageio model zoo collection file link --- plantseg/core/zoo.py | 13 +++++++------ 1 file changed, 7 
insertions(+), 6 deletions(-) diff --git a/plantseg/core/zoo.py b/plantseg/core/zoo.py index 2e0bffb6..69e3fa50 100644 --- a/plantseg/core/zoo.py +++ b/plantseg/core/zoo.py @@ -39,9 +39,7 @@ class Author(str, Enum): USER = 'user' -BIOIMAGE_IO_COLLECTION_URL = ( - "https://raw.githubusercontent.com/bioimage-io/collection-bioimage-io/gh-pages/collection.json" -) +BIOIMAGE_IO_COLLECTION_URL = "https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/collection.json" class ModelZooRecord(BaseModel): @@ -421,8 +419,11 @@ def refresh_bioimageio_zoo_urls(self): with collection_path.open(encoding='utf-8') as f: collection = json.load(f) + def get_id(entry): + return entry["id"] if "nickname" not in entry else entry["nickname"] + models = [entry for entry in collection["collection"] if entry["type"] == "model"] - max_nickname_length = max(len(entry["nickname"]) for entry in models) + max_nickname_length = max(len(get_id(entry)) for entry in models) def truncate_name(name, length=100): return name[:length] + '...' if len(name) > length else name @@ -431,9 +432,9 @@ def build_model_url_dict(filter_func=None): filtered_models = filter(filter_func, models) if filter_func else models return { entry['name']: { - "id": entry["nickname"], + "id": get_id(entry), "name": entry["name"], - "name_display": f"{entry['nickname']:<{max_nickname_length}}: {truncate_name(entry['name'])}", + "name_display": f"{get_id(entry):<{max_nickname_length}}: {truncate_name(entry['name'])}", "rdf_source": entry["rdf_source"], "supported": self._is_plantseg_model(entry), } From c0ceb3a0461e805fef2a74e05137380a568eccd7 Mon Sep 17 00:00:00 2001 From: Qin Yu Date: Sat, 14 Dec 2024 01:07:51 +0100 Subject: [PATCH 13/32] feat(pred)!: use `bioimageio.core` for BioImage.IO Model Zoo model inference --- plantseg/functionals/prediction/prediction.py | 59 ++++++++++++++++++- 1 file changed, 58 insertions(+), 1 deletion(-) diff --git a/plantseg/functionals/prediction/prediction.py b/plantseg/functionals/prediction/prediction.py index c8ae5e81..33a5eb0d 100644 --- a/plantseg/functionals/prediction/prediction.py +++ b/plantseg/functionals/prediction/prediction.py @@ -1,8 +1,16 @@ import logging from pathlib import Path +from typing import assert_never import numpy as np import torch +from bioimageio.core.axis import AxisId +from bioimageio.core.prediction import predict +from bioimageio.core.sample import Sample +from bioimageio.core.tensor import Tensor +from bioimageio.spec import load_model_description +from bioimageio.spec.model import v0_4, v0_5 +from bioimageio.spec.model.v0_5 import TensorId from plantseg.core.zoo import model_zoo from plantseg.functionals.dataprocessing.dataprocessing import ImageLayout, fix_layout_to_CZYX, fix_layout_to_ZYX @@ -16,6 +24,52 @@ logger = logging.getLogger(__name__) +def biio_prediction( + raw: np.ndarray, + input_layout: ImageLayout, + model_id: str, +) -> np.ndarray: + model = load_model_description(model_id) + if isinstance(model, v0_4.ModelDescr): + input_ids = [input_tensor.name for input_tensor in model.inputs] + elif isinstance(model, v0_5.ModelDescr): + input_ids = [input_tensor.id for input_tensor in model.inputs] + else: + assert_never(model) + + if len(input_ids) < 1: + logger.error("Model needs no input tensor.") + if len(input_ids) > 1: + logger.warning("Model needs more than one input tensor. 
PlantSeg does not support this yet.") + tensor_id = input_ids[0] + + logger.info(f"model expects these inputs: {input_ids}") + + assert isinstance(input_layout, str) + dims = tuple( + 'channel' if item.lower() == 'c' else item.lower() for item in input_layout + ) # `AxisId` has to be "channel" not "c" + sample = Sample( + members={ + TensorId(tensor_id): Tensor(array=raw, dims=dims).transpose( + [AxisId(a) if isinstance(a, str) else a.id for a in model.inputs[0].axes] + ) + }, + stat={}, + id="raw", + ) + + sample_out = predict(model=model, inputs=sample) + assert isinstance(sample_out, Sample) + if len(sample_out.members) != 1: + logger.warning("Model has more than one output tensor. PlantSeg does not support this yet.") + key = list(sample_out.members.keys())[0] + pmaps = sample_out.members[key].data.to_numpy()[0] + assert pmaps.ndim == 4, f"Expected 4D CZXY prediction from `biio_prediction()`, got {pmaps.ndim}D" + + return pmaps + + def unet_prediction( raw: np.ndarray, input_layout: ImageLayout, @@ -61,7 +115,10 @@ def unet_prediction( model, model_config, model_path = model_zoo.get_model_by_config_path(config_path, model_weights_path) elif model_id is not None: # BioImage.IO zoo mode logger.info("BioImage.IO prediction: Running model from BioImage.IO model zoo.") - model, model_config, model_path = model_zoo.get_model_by_id(model_id) + if True: # NOTE: For now, do not use native pytorch-3dunet prediction if using BioImage.IO models + return biio_prediction(raw=raw, input_layout=input_layout, model_id=model_id) + else: + model, model_config, model_path = model_zoo.get_model_by_id(model_id) elif model_name is not None: # PlantSeg zoo mode logger.info("Zoo prediction: Running model from PlantSeg official zoo.") model, model_config, model_path = model_zoo.get_model_by_name(model_name, model_update=model_update) From 3585207d6467ba608dda4c00d8db89102b1d7765 Mon Sep 17 00:00:00 2001 From: Qin Yu Date: Sat, 14 Dec 2024 01:29:38 +0100 Subject: [PATCH 14/32] docs: explain functionals/tasks/widgets due to #371 --- docs/chapters/getting_started/contributing.md | 4 ++++ docs/chapters/python_api/index.md | 7 +++++++ setup.py | 2 +- 3 files changed, 12 insertions(+), 1 deletion(-) create mode 100644 docs/chapters/python_api/index.md diff --git a/docs/chapters/getting_started/contributing.md b/docs/chapters/getting_started/contributing.md index 7a31fa46..02e00378 100644 --- a/docs/chapters/getting_started/contributing.md +++ b/docs/chapters/getting_started/contributing.md @@ -17,6 +17,10 @@ To install PlantSeg in development mode, run: pip install -e . --no-deps ``` +## Hierarchical Design of PlantSeg + +Please refer to [Python API](../python_api/index.md). + ## Coding Style PlantSeg uses _Ruff_ for linting and formatting. _Ruff_ is compatible with _Black_ for formatting. Ensure you have _Black_ set as the formatter with a line length of 120. diff --git a/docs/chapters/python_api/index.md b/docs/chapters/python_api/index.md new file mode 100644 index 00000000..d65dabbe --- /dev/null +++ b/docs/chapters/python_api/index.md @@ -0,0 +1,7 @@ +# Hierarchical Design of PlantSeg + +PlantSeg is organized into three layers: + + 1. Functionals (Python API): The foundational layer of PlantSeg, providing its core functionality. This layer can be accessed directly in Python scripts or Jupyter notebooks. + 2. Tasks: The intermediate layer of PlantSeg, which encapsulates the functionals to handle resource management and support distributed computing. + 3. 
Napari Widgets: The top layer of PlantSeg, which integrates tasks into user-friendly widgets for easy interaction within graphical interfaces. diff --git a/setup.py b/setup.py index 9fde15cc..2cb0ca6b 100644 --- a/setup.py +++ b/setup.py @@ -10,7 +10,7 @@ 'plantseg': ['resources/logo_white.png'], }, description='PlantSeg is a tool for cell instance aware segmentation in densely packed 3D volumetric images.', - author='Lorenzo Cerrone, Adrian Wolny', + author='Lorenzo Cerrone, Adrian Wolny, Qin Yu', url='https://github.com/kreshuklab/plant-seg', author_email='lorenzo.cerrone@iwr.uni-heidelberg.de', ) From 97e7c04b9b95c1352abc06f11253a7ae021539fe Mon Sep 17 00:00:00 2001 From: Qin Yu Date: Mon, 16 Dec 2024 14:13:21 +0100 Subject: [PATCH 15/32] fix: `plantseg: command not found` for dev env --- setup.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/setup.py b/setup.py index 2cb0ca6b..710a650f 100644 --- a/setup.py +++ b/setup.py @@ -13,4 +13,9 @@ author='Lorenzo Cerrone, Adrian Wolny, Qin Yu', url='https://github.com/kreshuklab/plant-seg', author_email='lorenzo.cerrone@iwr.uni-heidelberg.de', + entry_points={ + 'console_scripts': [ + 'plantseg=plantseg.run_plantseg:main', + ], + }, ) From f14afc18b5f324b571885dddde2745d336e62f75 Mon Sep 17 00:00:00 2001 From: Qin Yu Date: Mon, 16 Dec 2024 15:08:14 +0100 Subject: [PATCH 16/32] fix: bioimageio `Tensor` needs `AxisId` in some versions --- plantseg/functionals/prediction/prediction.py | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/plantseg/functionals/prediction/prediction.py b/plantseg/functionals/prediction/prediction.py index 33a5eb0d..339c7902 100644 --- a/plantseg/functionals/prediction/prediction.py +++ b/plantseg/functionals/prediction/prediction.py @@ -47,17 +47,14 @@ def biio_prediction( assert isinstance(input_layout, str) dims = tuple( - 'channel' if item.lower() == 'c' else item.lower() for item in input_layout + AxisId('channel') if item.lower() == 'c' else AxisId(item.lower()) for item in input_layout ) # `AxisId` has to be "channel" not "c" - sample = Sample( - members={ - TensorId(tensor_id): Tensor(array=raw, dims=dims).transpose( - [AxisId(a) if isinstance(a, str) else a.id for a in model.inputs[0].axes] - ) - }, - stat={}, - id="raw", - ) + members = { + TensorId(tensor_id): Tensor(array=raw, dims=dims).transpose( + [AxisId(a) if isinstance(a, str) else a.id for a in model.inputs[0].axes] + ) + } + sample = Sample(members=members, stat={}, id="raw") sample_out = predict(model=model, inputs=sample) assert isinstance(sample_out, Sample) From 7a16e04838087b393564ed7a9dbbc02ae69a43d4 Mon Sep 17 00:00:00 2001 From: Qin Yu Date: Mon, 16 Dec 2024 15:08:56 +0100 Subject: [PATCH 17/32] feat: use `predict_sample_with_blocking` --- plantseg/functionals/prediction/prediction.py | 18 +- test_biio.py | 17 + test_zoo.ipynb | 2554 +++++++++++++++++ 3 files changed, 2587 insertions(+), 2 deletions(-) create mode 100644 test_biio.py create mode 100644 test_zoo.ipynb diff --git a/plantseg/functionals/prediction/prediction.py b/plantseg/functionals/prediction/prediction.py index 339c7902..6e1a1b9d 100644 --- a/plantseg/functionals/prediction/prediction.py +++ b/plantseg/functionals/prediction/prediction.py @@ -54,9 +54,23 @@ def biio_prediction( [AxisId(a) if isinstance(a, str) else a.id for a in model.inputs[0].axes] ) } - sample = Sample(members=members, stat={}, id="raw") + input_block_shape = { + TensorId(tensor_id): { + # 'emotional-cricket' has: + # {'batch': None, 'channel': 1, 'z': 100, 'y': 
128, 'x': 128} + # + # 'philosophical-panda' has: + # {'z': ParameterizedSize(min=1, step=1), + # 'channel': 2, + # 'y': ParameterizedSize(min=16, step=16), + # 'x': ParameterizedSize(min=16, step=16)} + AxisId(a) if isinstance(a, str) else a.id: a.size if a.size is not None else 1 + for a in model.inputs[0].axes + } + } - sample_out = predict(model=model, inputs=sample) + sample = Sample(members=members, stat={}, id="raw") + sample_out = predict(model=model, inputs=sample, input_block_shape=input_block_shape) assert isinstance(sample_out, Sample) if len(sample_out.members) != 1: logger.warning("Model has more than one output tensor. PlantSeg does not support this yet.") diff --git a/test_biio.py b/test_biio.py new file mode 100644 index 00000000..1f8d2522 --- /dev/null +++ b/test_biio.py @@ -0,0 +1,17 @@ +import numpy as np +from bioimageio.core.prediction import predict +from bioimageio.core.sample import Sample +from bioimageio.core.tensor import Tensor +from bioimageio.spec.model.v0_5 import TensorId + +array = np.random.randint(0, 255, (2, 128, 128, 128), dtype=np.uint8) +dims = ('c', 'z', 'y', 'x') +sample = Sample(members={TensorId('a'): Tensor(array=array, dims=dims)}, stat={}, id='try') + +temp = predict( + # model='https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/philosophical-panda/0.0.11/files/rdf.yaml', + model='https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/emotional-cricket/1.1/files/rdf.yaml', + # model='/Users/qin/Downloads/efficient-chipmunk.yaml', + inputs=sample, + sample_id='sample', +) diff --git a/test_zoo.ipynb b/test_zoo.ipynb new file mode 100644 index 00000000..5cf42488 --- /dev/null +++ b/test_zoo.ipynb @@ -0,0 +1,2554 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "INFO: P [MainThread] 2024-12-05 16:54:48,153 plantseg - Logger configured at initialisation. PlantSeg logger name: plantseg\n" + ] + } + ], + "source": [ + "from plantseg.core.zoo import model_zoo" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "PlantSeg version: 2.0.0a7\n", + "PyTorch version: 2.2.2\n" + ] + } + ], + "source": [ + "import torch\n", + "\n", + "from plantseg import __version__\n", + "\n", + "print(f\"PlantSeg version: {__version__.__version__}\")\n", + "print(f\"PyTorch version: {torch.__version__}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "model_zoo.refresh_bioimageio_zoo_urls()" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
nameurlpathdescriptionresolutiondimensionalitymodalityrecommended_patch_sizeoutput_typedoiadded_byname_displayrdf_sourcesupported
id
affable-sharkNucleiSegmentationBoundaryModelNoneNoneNoneNoneNoneNoneNoneNoneNonebioimage.ioaffable-shark : NucleiSegmentationB...https://uk1s3.embassy.ebi.ac.uk/public-dataset...False
chatty-frogStarDist H&E Nuclei SegmentationNoneNoneNoneNoneNoneNoneNoneNoneNonebioimage.iochatty-frog : StarDist H&E Nuclei...https://uk1s3.embassy.ebi.ac.uk/public-dataset...False
hiding-tigerLiveCellSegmentationBoundaryModelNoneNoneNoneNoneNoneNoneNoneNoneNonebioimage.iohiding-tiger : LiveCellSegmentatio...https://uk1s3.embassy.ebi.ac.uk/public-dataset...False
impartial-shrimpNeuron Segmentation in EM (Membrane Prediction)NoneNoneNoneNoneNoneNoneNoneNoneNonebioimage.ioimpartial-shrimp : Neuron Segmentation...https://uk1s3.embassy.ebi.ac.uk/public-dataset...False
discreet-roosterPancreatic Phase Contrast Cell Segmentation (U...NoneNoneNoneNoneNoneNoneNoneNoneNonebioimage.iodiscreet-rooster : Pancreatic Phase Co...https://uk1s3.embassy.ebi.ac.uk/public-dataset...False
.............................................
stupendous-sheep(Empanada) 2D Instance Mitochondrial Segmentat...NoneNoneNoneNoneNoneNoneNoneNoneNonebioimage.iostupendous-sheep : (Empanada) 2D Insta...https://uk1s3.embassy.ebi.ac.uk/public-dataset...False
pioneering-goatUniFMIRProjectionOnFlyWingNoneNoneNoneNoneNoneNoneNoneNoneNonebioimage.iopioneering-goat : UniFMIRProjectionOn...https://uk1s3.embassy.ebi.ac.uk/public-dataset...False
greedy-sharkUniFMIRVolumetricReconstructionOnVCDNoneNoneNoneNoneNoneNoneNoneNoneNonebioimage.iogreedy-shark : UniFMIRVolumetricRe...https://uk1s3.embassy.ebi.ac.uk/public-dataset...False
lucky-foxUniFMIRIsotropicReconstructionOnLiverNoneNoneNoneNoneNoneNoneNoneNoneNonebioimage.iolucky-fox : UniFMIRIsotropicRec...https://uk1s3.embassy.ebi.ac.uk/public-dataset...False
frank-water-buffaloUniFMIRDenoiseOnPlanariaNoneNoneNoneNoneNoneNoneNoneNoneNonebioimage.iofrank-water-buffalo : UniFMIRDenoiseOnPla...https://uk1s3.embassy.ebi.ac.uk/public-dataset...False
\n", + "

68 rows × 14 columns

\n", + "
" + ], + "text/plain": [ + " name url \\\n", + "id \n", + "affable-shark NucleiSegmentationBoundaryModel None \n", + "chatty-frog StarDist H&E Nuclei Segmentation None \n", + "hiding-tiger LiveCellSegmentationBoundaryModel None \n", + "impartial-shrimp Neuron Segmentation in EM (Membrane Prediction) None \n", + "discreet-rooster Pancreatic Phase Contrast Cell Segmentation (U... None \n", + "... ... ... \n", + "stupendous-sheep (Empanada) 2D Instance Mitochondrial Segmentat... None \n", + "pioneering-goat UniFMIRProjectionOnFlyWing None \n", + "greedy-shark UniFMIRVolumetricReconstructionOnVCD None \n", + "lucky-fox UniFMIRIsotropicReconstructionOnLiver None \n", + "frank-water-buffalo UniFMIRDenoiseOnPlanaria None \n", + "\n", + " path description resolution dimensionality modality \\\n", + "id \n", + "affable-shark None None None None None \n", + "chatty-frog None None None None None \n", + "hiding-tiger None None None None None \n", + "impartial-shrimp None None None None None \n", + "discreet-rooster None None None None None \n", + "... ... ... ... ... ... \n", + "stupendous-sheep None None None None None \n", + "pioneering-goat None None None None None \n", + "greedy-shark None None None None None \n", + "lucky-fox None None None None None \n", + "frank-water-buffalo None None None None None \n", + "\n", + " recommended_patch_size output_type doi added_by \\\n", + "id \n", + "affable-shark None None None bioimage.io \n", + "chatty-frog None None None bioimage.io \n", + "hiding-tiger None None None bioimage.io \n", + "impartial-shrimp None None None bioimage.io \n", + "discreet-rooster None None None bioimage.io \n", + "... ... ... ... ... \n", + "stupendous-sheep None None None bioimage.io \n", + "pioneering-goat None None None bioimage.io \n", + "greedy-shark None None None bioimage.io \n", + "lucky-fox None None None bioimage.io \n", + "frank-water-buffalo None None None bioimage.io \n", + "\n", + " name_display \\\n", + "id \n", + "affable-shark affable-shark : NucleiSegmentationB... \n", + "chatty-frog chatty-frog : StarDist H&E Nuclei... \n", + "hiding-tiger hiding-tiger : LiveCellSegmentatio... \n", + "impartial-shrimp impartial-shrimp : Neuron Segmentation... \n", + "discreet-rooster discreet-rooster : Pancreatic Phase Co... \n", + "... ... \n", + "stupendous-sheep stupendous-sheep : (Empanada) 2D Insta... \n", + "pioneering-goat pioneering-goat : UniFMIRProjectionOn... \n", + "greedy-shark greedy-shark : UniFMIRVolumetricRe... \n", + "lucky-fox lucky-fox : UniFMIRIsotropicRec... \n", + "frank-water-buffalo frank-water-buffalo : UniFMIRDenoiseOnPla... \n", + "\n", + " rdf_source \\\n", + "id \n", + "affable-shark https://uk1s3.embassy.ebi.ac.uk/public-dataset... \n", + "chatty-frog https://uk1s3.embassy.ebi.ac.uk/public-dataset... \n", + "hiding-tiger https://uk1s3.embassy.ebi.ac.uk/public-dataset... \n", + "impartial-shrimp https://uk1s3.embassy.ebi.ac.uk/public-dataset... \n", + "discreet-rooster https://uk1s3.embassy.ebi.ac.uk/public-dataset... \n", + "... ... \n", + "stupendous-sheep https://uk1s3.embassy.ebi.ac.uk/public-dataset... \n", + "pioneering-goat https://uk1s3.embassy.ebi.ac.uk/public-dataset... \n", + "greedy-shark https://uk1s3.embassy.ebi.ac.uk/public-dataset... \n", + "lucky-fox https://uk1s3.embassy.ebi.ac.uk/public-dataset... \n", + "frank-water-buffalo https://uk1s3.embassy.ebi.ac.uk/public-dataset... 
\n", + "\n", + " supported \n", + "id \n", + "affable-shark False \n", + "chatty-frog False \n", + "hiding-tiger False \n", + "impartial-shrimp False \n", + "discreet-rooster False \n", + "... ... \n", + "stupendous-sheep False \n", + "pioneering-goat False \n", + "greedy-shark False \n", + "lucky-fox False \n", + "frank-water-buffalo False \n", + "\n", + "[68 rows x 14 columns]" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model_zoo.models_bioimageio" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['affable-shark',\n", + " 'affectionate-cow',\n", + " 'ambitious-ant',\n", + " 'ambitious-sloth',\n", + " 'amiable-crocodile',\n", + " 'charismatic-whale',\n", + " 'chatty-frog',\n", + " 'committed-turkey',\n", + " 'conscientious-seashell',\n", + " 'courteous-otter',\n", + " 'creative-panda',\n", + " 'dazzling-spider',\n", + " 'decisive-panda',\n", + " 'determined-chipmunk',\n", + " 'determined-hedgehog',\n", + " 'diplomatic-bug',\n", + " 'discreet-rooster',\n", + " 'dynamic-t-rex',\n", + " 'easy-going-sauropod',\n", + " 'efficient-chipmunk',\n", + " 'emotional-cricket',\n", + " 'faithful-chicken',\n", + " 'famous-fish',\n", + " 'fearless-crab',\n", + " 'frank-water-buffalo',\n", + " 'greedy-shark',\n", + " 'greedy-whale',\n", + " 'happy-elephant',\n", + " 'hiding-blowfish',\n", + " 'hiding-tiger',\n", + " 'humorous-crab',\n", + " 'humorous-fox',\n", + " 'humorous-owl',\n", + " 'idealistic-rat',\n", + " 'impartial-shark',\n", + " 'impartial-shrimp',\n", + " 'independent-shrimp',\n", + " 'joyful-deer',\n", + " 'kind-seashell',\n", + " 'laid-back-lobster',\n", + " 'loyal-parrot',\n", + " 'loyal-squid',\n", + " 'lucky-fox',\n", + " 'modest-octopus',\n", + " 'naked-microbe',\n", + " 'nice-peacock',\n", + " 'noisy-fish',\n", + " 'noisy-hedgehog',\n", + " 'noisy-ox',\n", + " 'non-judgemental-eagle',\n", + " 'organized-badger',\n", + " 'organized-cricket',\n", + " 'passionate-t-rex',\n", + " 'philosophical-panda',\n", + " 'pioneering-goat',\n", + " 'pioneering-rhino',\n", + " 'placid-llama',\n", + " 'polite-pig',\n", + " 'powerful-chipmunk',\n", + " 'powerful-fish',\n", + " 'resourceful-lizard',\n", + " 'shivering-raccoon',\n", + " 'straightforward-crocodile',\n", + " 'stupendous-sheep',\n", + " 'thoughtful-turtle',\n", + " 'wild-rhino',\n", + " 'wild-whale',\n", + " 'willing-hedgehog']" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# get all index\n", + "\n", + "sorted(model_zoo.models_bioimageio.index.to_list())" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['efficient-chipmunk',\n", + " 'emotional-cricket',\n", + " 'loyal-squid',\n", + " 'noisy-fish',\n", + " 'passionate-t-rex',\n", + " 'pioneering-rhino',\n", + " 'powerful-fish',\n", + " 'thoughtful-turtle']" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# get all index where `supported` is True:\n", + "\n", + "sorted(model_zoo.models_bioimageio[model_zoo.models_bioimageio[\"supported\"]].index.to_list())" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "True" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "'efficient-chipmunk' in 
model_zoo.models_bioimageio.index" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "name PlantSeg Plant Nuclei 3D UNet\n", + "url None\n", + "path None\n", + "description None\n", + "resolution None\n", + "dimensionality None\n", + "modality None\n", + "recommended_patch_size None\n", + "output_type None\n", + "doi None\n", + "added_by bioimage.io\n", + "name_display efficient-chipmunk : PlantSeg Plant Nucl...\n", + "rdf_source https://uk1s3.embassy.ebi.ac.uk/public-dataset...\n", + "supported True\n", + "Name: efficient-chipmunk, dtype: object" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model_zoo.models_bioimageio.loc['efficient-chipmunk']" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Url('https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/efficient-chipmunk/1/files/rdf.yaml')" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model_zoo.models_bioimageio.loc['efficient-chipmunk']['rdf_source']" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Url('https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/emotional-cricket/1.1/files/rdf.yaml')" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model_zoo.models_bioimageio.loc['emotional-cricket']['rdf_source']" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "id\n", + "affable-shark NucleiSegmentationBoundaryModel\n", + "chatty-frog StarDist H&E Nuclei Segmentation\n", + "hiding-tiger LiveCellSegmentationBoundaryModel\n", + "impartial-shrimp Neuron Segmentation in EM (Membrane Prediction)\n", + "discreet-rooster Pancreatic Phase Contrast Cell Segmentation (U...\n", + " ... 
\n", + "stupendous-sheep (Empanada) 2D Instance Mitochondrial Segmentat...\n", + "pioneering-goat UniFMIRProjectionOnFlyWing\n", + "greedy-shark UniFMIRVolumetricReconstructionOnVCD\n", + "lucky-fox UniFMIRIsotropicReconstructionOnLiver\n", + "frank-water-buffalo UniFMIRDenoiseOnPlanaria\n", + "Name: name, Length: 68, dtype: object" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model_zoo.models_bioimageio['name']" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "pydantic_core._pydantic_core.Url" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "type(model_zoo.models_bioimageio.at['efficient-chipmunk', 'rdf_source'])" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "pydantic_core._pydantic_core.Url" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "type(model_zoo.models_bioimageio.loc['efficient-chipmunk']['rdf_source'])" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np\n", + "from bioimageio.core.prediction import predict\n", + "from bioimageio.core.sample import Sample\n", + "from bioimageio.core.tensor import Tensor\n", + "from bioimageio.spec.model.v0_5 import TensorId" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "array = np.random.randint(0, 255, (128, 128, 128), dtype=np.uint8)\n", + "dims = ('z', 'y', 'x')\n", + "sample = Sample(members={TensorId('a'): Tensor(array, dims)}, stat={}, id='try')\n", + "# sample.members[TensorId('a')].data" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Url('https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/philosophical-panda/0.0.11/files/rdf.yaml')" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model_zoo.models_bioimageio.at['philosophical-panda', 'rdf_source']" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Downloading data from 'https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/philosophical-panda/0.0.11/files/rdf.yaml' to file '/Users/qin/Library/Caches/bioimageio/520a69782c7dafb6478e43ebcc4679b0-rdf.yaml'.\n", + "100%|█████████████████████████████████████| 13.5k/13.5k [00:00<00:00, 35.7MB/s]\n", + "SHA256 hash of downloaded file: bbad75237ecf4f9d9f6259b13b97fc01b5cbeb4e3ea672e72826608d68197a32\n", + "Use this value as the 'known_hash' argument of 'pooch.retrieve' to ensure that the file hasn't changed if it is downloaded again in the future.\n", + "Downloading data from 'https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/philosophical-panda/0.0.11/files/README.md' to file '/Users/qin/Library/Caches/bioimageio/42620dae3e3850cefbf0475c7cf590dd-README.md'.\n", + "100%|██████████████████████████████████████████| 431/431 [00:00<00:00, 658kB/s]\n", + "SHA256 hash of downloaded file: fc6e1292ca309bedaca504260cecc9a7bc9f26e9328eb7a051f82a2ceec475e3\n", + "Use this value as the 'known_hash' argument of 'pooch.retrieve' to ensure that the file hasn't changed if it is 
downloaded again in the future.\n", + "Downloading data from 'https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/philosophical-panda/0.0.11/files/test_input.npy' to file '/Users/qin/Library/Caches/bioimageio/101853864f8c8e986b2819c9ac44d0f9-test_input.npy'.\n", + "100%|█████████████████████████████████████| 5.53M/5.53M [00:00<00:00, 8.91GB/s]\n", + "computing SHA256 of 101853864f8c8e986b2819c9ac44d0f9-test_input.npy (result: 6810255f5b5260fe39153f2192bedf30d9899ec4e770976b7813116c467579f0): 100%|██████████| 5529728/5529728 [00:00<00:00, 1220895945.11it/s]\n", + "Downloading data from 'https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/philosophical-panda/0.0.11/files/test_output.npy' to file '/Users/qin/Library/Caches/bioimageio/57b964a123db3ff8400bcce3ef902b18-test_output.npy'.\n", + "100%|█████████████████████████████████████| 8.29M/8.29M [00:00<00:00, 11.9GB/s]\n", + "computing SHA256 of 57b964a123db3ff8400bcce3ef902b18-test_output.npy (result: d802e3024da80bff93a9ec50fbe50b9c3946534aab1b60b911511111a8e2dbca): 100%|██████████| 8294528/8294528 [00:00<00:00, 1690711569.64it/s]\n", + "Downloading data from 'https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/philosophical-panda/0.0.11/files/test_style.npy' to file '/Users/qin/Library/Caches/bioimageio/0b712fa8abf8707021a71747e726bf7c-test_style.npy'.\n", + "100%|█████████████████████████████████████| 76.9k/76.9k [00:00<00:00, 71.6MB/s]\n", + "computing SHA256 of 0b712fa8abf8707021a71747e726bf7c-test_style.npy (result: ab464b406f9050561b40f7d76700ab5edf3aca97e31fe9a6069a51aeeca8bc81): 100%|██████████| 76928/76928 [00:00<00:00, 106734838.94it/s]\n", + "Downloading data from 'https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/philosophical-panda/0.0.11/files/test_downsampled_0.npy' to file '/Users/qin/Library/Caches/bioimageio/d0abd68ef3844b0d6fbed811e0fed878-test_downsampled_0.npy'.\n", + "100%|█████████████████████████████████████| 88.5M/88.5M [00:00<00:00, 82.3GB/s]\n", + "computing SHA256 of d0abd68ef3844b0d6fbed811e0fed878-test_downsampled_0.npy (result: 67df53fb440e94dbb9c8e4003dcbde158646a7975c4878cacdd251e1fcfb4225): 100%|██████████| 88473728/88473728 [00:00<00:00, 2235146402.88it/s]\n", + "Downloading data from 'https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/philosophical-panda/0.0.11/files/test_downsampled_1.npy' to file '/Users/qin/Library/Caches/bioimageio/3765cac1d92a49daf0d6ec949919aeb1-test_downsampled_1.npy'.\n", + "100%|█████████████████████████████████████| 44.2M/44.2M [00:00<00:00, 52.2GB/s]\n", + "computing SHA256 of 3765cac1d92a49daf0d6ec949919aeb1-test_downsampled_1.npy (result: cb4addbd763d96731ebd18ed001b87ab7195ec9198f01a753a363a06c27bfb1c): 100%|██████████| 44236928/44236928 [00:00<00:00, 2150700977.83it/s]\n", + "Downloading data from 'https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/philosophical-panda/0.0.11/files/test_downsampled_2.npy' to file '/Users/qin/Library/Caches/bioimageio/7251078a2afa8713384a3103878dd09d-test_downsampled_2.npy'.\n", + "100%|█████████████████████████████████████| 22.1M/22.1M [00:00<00:00, 24.9GB/s]\n", + "computing SHA256 of 7251078a2afa8713384a3103878dd09d-test_downsampled_2.npy (result: 9c0225b94d84fcc3adfb9a73eef1303d6adb318b57a5a801e0e2e1638b458e72): 100%|██████████| 22118528/22118528 [00:00<00:00, 2035139420.08it/s]\n", + "Downloading data from 'https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/philosophical-panda/0.0.11/files/test_downsampled_3.npy' to file 
'/Users/qin/Library/Caches/bioimageio/e434cdc3ea3e7ecfb752cdc001617875-test_downsampled_3.npy'.\n", + "100%|█████████████████████████████████████| 11.1M/11.1M [00:00<00:00, 15.7GB/s]\n", + "computing SHA256 of e434cdc3ea3e7ecfb752cdc001617875-test_downsampled_3.npy (result: 1ea789ff37d47197c847b585799f7d063e7592b0c5e9c3094fd0e3ac209b7fc2): 100%|██████████| 11059328/11059328 [00:00<00:00, 1367718816.68it/s]\n", + "Downloading data from 'https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/philosophical-panda/0.0.11/files/cpnet_wrapper.py' to file '/Users/qin/Library/Caches/bioimageio/00bd170d6c9de6a391d6869f59058847-cpnet_wrapper.py'.\n", + "100%|█████████████████████████████████████| 11.1k/11.1k [00:00<00:00, 18.9MB/s]\n", + "computing SHA256 of 00bd170d6c9de6a391d6869f59058847-cpnet_wrapper.py (result: b8b947cdd0ea8f5b98bd7be5f12f38bb1ea1ebe0b455c62d9a6389cd21d134bf): 100%|██████████| 11053/11053 [00:00<00:00, 15037185.25it/s]\n", + "Downloading data from 'https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/philosophical-panda/0.0.11/files/cp_state_dict_1135_gold.pth' to file '/Users/qin/Library/Caches/bioimageio/8dbb20d5a3cb3a3dfdb5101a671861ce-cp_state_dict_1135_gold.pth'.\n", + "100%|█████████████████████████████████████| 26.6M/26.6M [00:00<00:00, 43.3GB/s]\n", + "computing SHA256 of 8dbb20d5a3cb3a3dfdb5101a671861ce-cp_state_dict_1135_gold.pth (result: 26c277f3b8f6ca5aab30b4b0a832601aea60183cbed1c2333576f4135a643eb2): 100%|██████████| 26556687/26556687 [00:00<00:00, 2103662363.99it/s]\n", + "Downloading data from 'https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/philosophical-panda/0.0.11/files/cp_traced_1135_gold.pt' to file '/Users/qin/Library/Caches/bioimageio/17fee110c39ccad7c3cb36d00d2fdd2c-cp_traced_1135_gold.pt'.\n", + "100%|█████████████████████████████████████| 26.8M/26.8M [00:00<00:00, 26.3GB/s]\n", + "computing SHA256 of 17fee110c39ccad7c3cb36d00d2fdd2c-cp_traced_1135_gold.pt (result: f61bae146ab522902350eadda1d509ac1037726fe6d7fb63f6a8a314021d63e7): 100%|██████████| 26812339/26812339 [00:00<00:00, 2032773000.69it/s]\n" + ] + }, + { + "ename": "AttributeError", + "evalue": "'NoneType' object has no attribute 'dim'", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[15], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m temp \u001b[38;5;241m=\u001b[39m \u001b[43mpredict\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 2\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;66;43;03m# model=model_zoo.models_bioimageio.at['emotional-cricket', 'rdf_source'],\u001b[39;49;00m\n\u001b[1;32m 3\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;66;43;03m# model=model_zoo.models_bioimageio.at['efficient-chipmunk', 'rdf_source'],\u001b[39;49;00m\n\u001b[1;32m 4\u001b[0m \u001b[43m \u001b[49m\u001b[43mmodel\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmodel_zoo\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmodels_bioimageio\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mat\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mphilosophical-panda\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mrdf_source\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 5\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;66;43;03m# 
model='/Users/qin/Downloads/rdf.yaml',\u001b[39;49;00m\n\u001b[1;32m 6\u001b[0m \u001b[43m \u001b[49m\u001b[43minputs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43msample\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 7\u001b[0m \u001b[43m \u001b[49m\u001b[43msample_id\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43msample\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 8\u001b[0m \u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/bioimageio/core/prediction.py:114\u001b[0m, in \u001b[0;36mpredict\u001b[0;34m(model, inputs, sample_id, blocksize_parameter, input_block_shape, skip_preprocessing, skip_postprocessing, save_output_path)\u001b[0m\n\u001b[1;32m 107\u001b[0m output \u001b[38;5;241m=\u001b[39m pp\u001b[38;5;241m.\u001b[39mpredict_sample_with_blocking(\n\u001b[1;32m 108\u001b[0m sample,\n\u001b[1;32m 109\u001b[0m skip_preprocessing\u001b[38;5;241m=\u001b[39mskip_preprocessing,\n\u001b[1;32m 110\u001b[0m skip_postprocessing\u001b[38;5;241m=\u001b[39mskip_postprocessing,\n\u001b[1;32m 111\u001b[0m ns\u001b[38;5;241m=\u001b[39mblocksize_parameter,\n\u001b[1;32m 112\u001b[0m )\n\u001b[1;32m 113\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m--> 114\u001b[0m output \u001b[38;5;241m=\u001b[39m \u001b[43mpp\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mpredict_sample_without_blocking\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 115\u001b[0m \u001b[43m \u001b[49m\u001b[43msample\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 116\u001b[0m \u001b[43m \u001b[49m\u001b[43mskip_preprocessing\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mskip_preprocessing\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 117\u001b[0m \u001b[43m \u001b[49m\u001b[43mskip_postprocessing\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mskip_postprocessing\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 118\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 119\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m save_output_path:\n\u001b[1;32m 120\u001b[0m save_sample(save_output_path, output)\n", + "File \u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/bioimageio/core/_prediction_pipeline.py:160\u001b[0m, in \u001b[0;36mPredictionPipeline.predict_sample_without_blocking\u001b[0;34m(self, sample, skip_preprocessing, skip_postprocessing)\u001b[0m\n\u001b[1;32m 152\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m skip_preprocessing:\n\u001b[1;32m 153\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mapply_preprocessing(sample)\n\u001b[1;32m 155\u001b[0m output \u001b[38;5;241m=\u001b[39m Sample(\n\u001b[1;32m 156\u001b[0m members\u001b[38;5;241m=\u001b[39m{\n\u001b[1;32m 157\u001b[0m out_id: out\n\u001b[1;32m 158\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m out_id, out \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mzip\u001b[39m(\n\u001b[1;32m 159\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_output_ids,\n\u001b[0;32m--> 160\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_adapter\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mforward\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 161\u001b[0m \u001b[43m 
\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43msample\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmembers\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget\u001b[49m\u001b[43m(\u001b[49m\u001b[43min_id\u001b[49m\u001b[43m)\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mfor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43min_id\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01min\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_input_ids\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 162\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m,\n\u001b[1;32m 163\u001b[0m )\n\u001b[1;32m 164\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m out \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 165\u001b[0m },\n\u001b[1;32m 166\u001b[0m stat\u001b[38;5;241m=\u001b[39msample\u001b[38;5;241m.\u001b[39mstat,\n\u001b[1;32m 167\u001b[0m \u001b[38;5;28mid\u001b[39m\u001b[38;5;241m=\u001b[39msample\u001b[38;5;241m.\u001b[39mid,\n\u001b[1;32m 168\u001b[0m )\n\u001b[1;32m 169\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m skip_postprocessing:\n\u001b[1;32m 170\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mapply_postprocessing(output)\n", + "File \u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/bioimageio/core/model_adapters/_pytorch_model_adapter.py:71\u001b[0m, in \u001b[0;36mPytorchModelAdapter.forward\u001b[0;34m(self, *input_tensors)\u001b[0m\n\u001b[1;32m 60\u001b[0m tensors \u001b[38;5;241m=\u001b[39m [\n\u001b[1;32m 61\u001b[0m (\n\u001b[1;32m 62\u001b[0m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 68\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m t \u001b[38;5;129;01min\u001b[39;00m tensors\n\u001b[1;32m 69\u001b[0m ]\n\u001b[1;32m 70\u001b[0m result: Union[Tuple[Any, \u001b[38;5;241m.\u001b[39m\u001b[38;5;241m.\u001b[39m\u001b[38;5;241m.\u001b[39m], List[Any], Any]\n\u001b[0;32m---> 71\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_network\u001b[49m\u001b[43m(\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# pyright: ignore[reportUnknownVariableType]\u001b[39;49;00m\n\u001b[1;32m 72\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mtensors\u001b[49m\n\u001b[1;32m 73\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 74\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(result, (\u001b[38;5;28mtuple\u001b[39m, \u001b[38;5;28mlist\u001b[39m)):\n\u001b[1;32m 75\u001b[0m result \u001b[38;5;241m=\u001b[39m [result]\n", + "File \u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/torch/nn/modules/module.py:1511\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1509\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1510\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1511\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m 
\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/torch/nn/modules/module.py:1520\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1515\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1516\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1517\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1518\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1519\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1520\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1522\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 1523\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n", + "File \u001b[0;32m~/Library/Caches/bioimageio/00bd170d6c9de6a391d6869f59058847-cpnet_wrapper.py:283\u001b[0m, in \u001b[0;36mCPnetBioImageIO.forward\u001b[0;34m(self, x)\u001b[0m\n\u001b[1;32m 273\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, x):\n\u001b[1;32m 274\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m 275\u001b[0m \u001b[38;5;124;03m Perform a forward pass of the CPnet model and return unpacked tensors.\u001b[39;00m\n\u001b[1;32m 276\u001b[0m \n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 281\u001b[0m \u001b[38;5;124;03m tuple: A tuple containing the output tensor, style tensor, and downsampled tensors.\u001b[39;00m\n\u001b[1;32m 282\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[0;32m--> 283\u001b[0m output_tensor, style_tensor, downsampled_tensors \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43msuper\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mforward\u001b[49m\u001b[43m(\u001b[49m\u001b[43mx\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 284\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m output_tensor, style_tensor, \u001b[38;5;241m*\u001b[39mdownsampled_tensors\n", + "File \u001b[0;32m~/Library/Caches/bioimageio/00bd170d6c9de6a391d6869f59058847-cpnet_wrapper.py:207\u001b[0m, in \u001b[0;36mCPnet.forward\u001b[0;34m(self, data)\u001b[0m\n\u001b[1;32m 205\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m 
\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mmkldnn:\n\u001b[1;32m 206\u001b[0m data \u001b[38;5;241m=\u001b[39m data\u001b[38;5;241m.\u001b[39mto_mkldnn()\n\u001b[0;32m--> 207\u001b[0m T0 \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdownsample\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdata\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 208\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mmkldnn:\n\u001b[1;32m 209\u001b[0m style \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mmake_style(T0[\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m]\u001b[38;5;241m.\u001b[39mto_dense())\n", + "File \u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/torch/nn/modules/module.py:1511\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1509\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1510\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1511\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/torch/nn/modules/module.py:1520\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1515\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1516\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1517\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1518\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1519\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1520\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1522\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 1523\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n", + "File \u001b[0;32m~/Library/Caches/bioimageio/00bd170d6c9de6a391d6869f59058847-cpnet_wrapper.py:60\u001b[0m, in 
\u001b[0;36mdownsample.forward\u001b[0;34m(self, x)\u001b[0m\n\u001b[1;32m 58\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 59\u001b[0m y \u001b[38;5;241m=\u001b[39m x\n\u001b[0;32m---> 60\u001b[0m xd\u001b[38;5;241m.\u001b[39mappend(\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdown\u001b[49m\u001b[43m[\u001b[49m\u001b[43mn\u001b[49m\u001b[43m]\u001b[49m\u001b[43m(\u001b[49m\u001b[43my\u001b[49m\u001b[43m)\u001b[49m)\n\u001b[1;32m 61\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m xd\n", + "File \u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/torch/nn/modules/module.py:1511\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1509\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1510\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1511\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/torch/nn/modules/module.py:1520\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1515\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1516\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1517\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1518\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1519\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1520\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1522\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 1523\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n", + "File \u001b[0;32m~/Library/Caches/bioimageio/00bd170d6c9de6a391d6869f59058847-cpnet_wrapper.py:37\u001b[0m, in \u001b[0;36mresdown.forward\u001b[0;34m(self, x)\u001b[0m\n\u001b[1;32m 36\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, x):\n\u001b[0;32m---> 
37\u001b[0m x \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mproj\u001b[49m\u001b[43m(\u001b[49m\u001b[43mx\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;241m+\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mconv[\u001b[38;5;241m1\u001b[39m](\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mconv[\u001b[38;5;241m0\u001b[39m](x))\n\u001b[1;32m 38\u001b[0m x \u001b[38;5;241m=\u001b[39m x \u001b[38;5;241m+\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mconv[\u001b[38;5;241m3\u001b[39m](\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mconv[\u001b[38;5;241m2\u001b[39m](x))\n\u001b[1;32m 39\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m x\n", + "File \u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/torch/nn/modules/module.py:1511\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1509\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1510\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1511\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/torch/nn/modules/module.py:1520\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1515\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1516\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1517\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1518\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1519\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1520\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1522\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 1523\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n", + "File 
\u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/torch/nn/modules/container.py:217\u001b[0m, in \u001b[0;36mSequential.forward\u001b[0;34m(self, input)\u001b[0m\n\u001b[1;32m 215\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;28minput\u001b[39m):\n\u001b[1;32m 216\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m module \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m:\n\u001b[0;32m--> 217\u001b[0m \u001b[38;5;28minput\u001b[39m \u001b[38;5;241m=\u001b[39m \u001b[43mmodule\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[1;32m 218\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28minput\u001b[39m\n", + "File \u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/torch/nn/modules/module.py:1511\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1509\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1510\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1511\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/torch/nn/modules/module.py:1520\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1515\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1516\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1517\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1518\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1519\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1520\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1522\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 1523\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n", + "File 
\u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/torch/nn/modules/batchnorm.py:142\u001b[0m, in \u001b[0;36m_BatchNorm.forward\u001b[0;34m(self, input)\u001b[0m\n\u001b[1;32m 141\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;28minput\u001b[39m: Tensor) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Tensor:\n\u001b[0;32m--> 142\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_check_input_dim\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[1;32m 144\u001b[0m \u001b[38;5;66;03m# exponential_average_factor is set to self.momentum\u001b[39;00m\n\u001b[1;32m 145\u001b[0m \u001b[38;5;66;03m# (when it is available) only so that it gets updated\u001b[39;00m\n\u001b[1;32m 146\u001b[0m \u001b[38;5;66;03m# in ONNX graph when this node is exported to ONNX.\u001b[39;00m\n\u001b[1;32m 147\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mmomentum \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n", + "File \u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/torch/nn/modules/batchnorm.py:419\u001b[0m, in \u001b[0;36mBatchNorm2d._check_input_dim\u001b[0;34m(self, input)\u001b[0m\n\u001b[1;32m 418\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_check_input_dim\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;28minput\u001b[39m):\n\u001b[0;32m--> 419\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28;43minput\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdim\u001b[49m() \u001b[38;5;241m!=\u001b[39m \u001b[38;5;241m4\u001b[39m:\n\u001b[1;32m 420\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mexpected 4D input (got \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28minput\u001b[39m\u001b[38;5;241m.\u001b[39mdim()\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124mD input)\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n", + "\u001b[0;31mAttributeError\u001b[0m: 'NoneType' object has no attribute 'dim'" + ] + } + ], + "source": [ + "temp = predict(\n", + " # model=model_zoo.models_bioimageio.at['emotional-cricket', 'rdf_source'],\n", + " # model=model_zoo.models_bioimageio.at['efficient-chipmunk', 'rdf_source'],\n", + " model=model_zoo.models_bioimageio.at['philosophical-panda', 'rdf_source'],\n", + " # model='/Users/qin/Downloads/rdf.yaml',\n", + " inputs=sample,\n", + " sample_id='sample',\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "((1, 32, 64, 64), (64, 64, 32, 1))" + ] + }, + "execution_count": 23, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "np.random.rand(1, 32, 64, 64).shape, np.random.rand(64, 64, 32, 1).shape" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "from plantseg.functionals.dataprocessing.dataprocessing import ImageLayout" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "typing.Literal['ZYX', 'YX', 'CZYX', 'CYX']" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "ImageLayout" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + 
"metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "('Z', 'Y', 'X')" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "tuple('ZYX')" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "INFO: P [MainThread] 2024-12-14 01:09:58,630 plantseg - Logger configured at initialisation. PlantSeg logger name: plantseg\n" + ] + } + ], + "source": [ + "from pathlib import Path\n", + "from plantseg.functionals.prediction.prediction import biio_prediction\n", + "from plantseg.io import smart_load" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(453, 800, 800)" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "raw = smart_load(\n", + " Path('/Users/qin/Documents/Work/Side_rAP2_16LDs_SAM4_nuclei_Z0.400_X0.291_Y0.291_Sz453_Sx800_Sy800.tif')\n", + ")\n", + "raw.shape" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(128, 128, 128)" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "raw = smart_load(\n", + " Path('/Users/qin/Documents/Work/Side_rAP2_16LDs_SAM4_nuclei_Z0.400_X0.291_Y0.291_Sz453_Sx800_Sy800.tif')\n", + ")[200:200+128, 300:300+128, 300:300+128]\n", + "raw.shape" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[32m2024-12-14 00:35:41.719\u001b[0m | \u001b[1mINFO \u001b[0m | \u001b[36mbioimageio.spec._internal.io_utils\u001b[0m:\u001b[36mopen_bioimageio_yaml\u001b[0m:\u001b[36m131\u001b[0m - \u001b[1mloading emotional-cricket from https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/emotional-cricket/1.1/files/rdf.yaml\u001b[0m\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "INFO: P [MainThread] 2024-12-14 00:35:41,809 plantseg.functionals.prediction.prediction - model expects these inputs: ['raw']\n" + ] + } + ], + "source": [ + "out = biio_prediction(\n", + " raw=raw,\n", + " input_layout='ZYX',\n", + " # model_id='efficient-chipmunk',\n", + " model_id='emotional-cricket',\n", + " # model_id='/Users/qin/Downloads/efficient-chipmunk.yaml',\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 32, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "True" + ] + }, + "execution_count": 32, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "len(out.members) == 1" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['output0']" + ] + }, + "execution_count": 28, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "list(out.members)" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[array([[[[[1.23823965e-02, 3.71413166e-03, 2.64202990e-03, ...,\n", + " 2.41754763e-03, 4.86477558e-03, 2.36471929e-02],\n", + " [2.31429259e-03, 7.02380203e-04, 4.09954780e-04, ...,\n", + " 2.52719648e-04, 7.57241389e-04, 5.46071166e-03],\n", + " [1.33902160e-03, 3.50114511e-04, 2.14061569e-04, ...,\n", + " 7.40970063e-05, 2.46272044e-04, 2.46915710e-03],\n", + " ...,\n", 
+ " [8.08374316e-04, 7.53455024e-05, 2.44885123e-05, ...,\n", + " 8.84886977e-05, 3.21889500e-04, 2.94925272e-03],\n", + " [1.35905389e-03, 1.61183911e-04, 6.91879104e-05, ...,\n", + " 2.89193413e-04, 6.93224138e-04, 4.70515573e-03],\n", + " [9.65875201e-03, 1.88575720e-03, 1.10870227e-03, ...,\n", + " 2.58681760e-03, 3.97690758e-03, 1.58278439e-02]],\n", + " \n", + " [[3.34995706e-03, 7.49400002e-04, 4.44106583e-04, ...,\n", + " 4.23803140e-04, 1.16107799e-03, 7.32442131e-03],\n", + " [4.63154254e-04, 1.31924564e-04, 5.75331833e-05, ...,\n", + " 2.48063916e-05, 1.31788765e-04, 1.28855614e-03],\n", + " [1.84829667e-04, 4.22494304e-05, 1.93375345e-05, ...,\n", + " 3.12912471e-06, 2.20445181e-05, 3.40537110e-04],\n", + " ...,\n", + " [1.03433682e-04, 4.99963107e-06, 7.01059491e-07, ...,\n", + " 5.13319992e-06, 4.15788127e-05, 5.11675724e-04],\n", + " [2.40331719e-04, 2.02179817e-05, 4.81432517e-06, ...,\n", + " 2.36316791e-05, 1.03607512e-04, 9.28838330e-04],\n", + " [2.54508085e-03, 3.50280927e-04, 1.46785387e-04, ...,\n", + " 3.32942145e-04, 8.13076796e-04, 4.12234711e-03]],\n", + " \n", + " [[1.35694118e-03, 1.68786763e-04, 7.48198290e-05, ...,\n", + " 2.81019224e-04, 7.46771577e-04, 5.51791256e-03],\n", + " [1.08341432e-04, 1.00760262e-05, 2.75844582e-06, ...,\n", + " 8.99239421e-06, 5.42080925e-05, 7.20382726e-04],\n", + " [3.48077228e-05, 2.02582328e-06, 5.01538182e-07, ...,\n", + " 5.80834751e-07, 4.99987436e-06, 1.27662483e-04],\n", + " ...,\n", + " [1.09947527e-04, 5.69427766e-06, 8.12078270e-07, ...,\n", + " 1.42909403e-06, 1.42246154e-05, 2.88977870e-04],\n", + " [2.98362691e-04, 3.16787191e-05, 7.62458285e-06, ...,\n", + " 5.10403333e-06, 3.06438706e-05, 4.58879425e-04],\n", + " [2.99305934e-03, 5.03332238e-04, 2.14670028e-04, ...,\n", + " 1.38354051e-04, 4.05884377e-04, 2.70675565e-03]],\n", + " \n", + " ...,\n", + " \n", + " [[3.29867308e-03, 4.56455105e-04, 1.34146132e-04, ...,\n", + " 9.69568064e-05, 3.37075238e-04, 2.77165999e-03],\n", + " [4.24169732e-04, 3.51630406e-05, 5.06967717e-06, ...,\n", + " 5.14997964e-06, 3.60815611e-05, 4.70222265e-04],\n", + " [9.61356855e-05, 5.18314391e-06, 7.80053995e-07, ...,\n", + " 2.31303488e-06, 1.90853134e-05, 3.29483621e-04],\n", + " ...,\n", + " [8.78510036e-06, 3.06934055e-07, 1.15704395e-07, ...,\n", + " 4.02947308e-07, 3.91793219e-06, 1.21722005e-04],\n", + " [3.12283155e-05, 1.88902447e-06, 9.24211236e-07, ...,\n", + " 1.67769019e-06, 1.39318608e-05, 3.06272268e-04],\n", + " [9.07270878e-04, 9.94062648e-05, 5.61416928e-05, ...,\n", + " 5.73289071e-05, 2.48869706e-04, 2.29152758e-03]],\n", + " \n", + " [[5.91580058e-03, 1.10559561e-03, 3.32048250e-04, ...,\n", + " 2.32103208e-04, 6.34685624e-04, 4.07362822e-03],\n", + " [9.84633924e-04, 1.30795612e-04, 2.20851525e-05, ...,\n", + " 1.67520193e-05, 1.00321871e-04, 8.62589630e-04],\n", + " [2.57799606e-04, 2.33190221e-05, 3.75139280e-06, ...,\n", + " 7.50424260e-06, 5.11594008e-05, 5.78730425e-04],\n", + " ...,\n", + " [1.69782816e-05, 6.66840947e-07, 1.38846460e-07, ...,\n", + " 1.80018546e-07, 2.14967281e-06, 9.65387953e-05],\n", + " [5.67699681e-05, 3.36161565e-06, 1.02050706e-06, ...,\n", + " 1.21392122e-06, 1.14076483e-05, 2.88846204e-04],\n", + " [1.40678498e-03, 1.51812113e-04, 6.07411748e-05, ...,\n", + " 5.12399092e-05, 2.33667044e-04, 2.21933913e-03]],\n", + " \n", + " [[2.29063556e-02, 6.30153902e-03, 2.76722992e-03, ...,\n", + " 2.20514974e-03, 4.18286538e-03, 1.75831020e-02],\n", + " [4.68514580e-03, 7.92992418e-04, 3.06714675e-04, ...,\n", + " 2.61121342e-04, 
7.31239910e-04, 4.39845258e-03],\n", + " [1.91156555e-03, 2.52487953e-04, 1.00966608e-04, ...,\n", + " 1.51911445e-04, 4.64357348e-04, 3.32914991e-03],\n", + " ...,\n", + " [7.02249992e-04, 5.80488013e-05, 1.94775675e-05, ...,\n", + " 8.09186895e-06, 4.56816706e-05, 9.44769825e-04],\n", + " [1.51418359e-03, 1.61546574e-04, 7.56840236e-05, ...,\n", + " 3.58892830e-05, 1.60053183e-04, 2.05396442e-03],\n", + " [1.25697535e-02, 2.19693198e-03, 1.26698730e-03, ...,\n", + " 6.89297158e-04, 1.90667005e-03, 1.04174931e-02]]]]],\n", + " dtype=float32)]" + ] + }, + "execution_count": 31, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "[tensor.data.to_numpy() for tensor in out.members.values()]" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[[[[1.23823965e-02, 3.71413166e-03, 2.64202990e-03, ...,\n", + " 2.41754763e-03, 4.86477558e-03, 2.36471929e-02],\n", + " [2.31429259e-03, 7.02380203e-04, 4.09954780e-04, ...,\n", + " 2.52719648e-04, 7.57241389e-04, 5.46071166e-03],\n", + " [1.33902160e-03, 3.50114511e-04, 2.14061569e-04, ...,\n", + " 7.40970063e-05, 2.46272044e-04, 2.46915710e-03],\n", + " ...,\n", + " [8.08374316e-04, 7.53455024e-05, 2.44885123e-05, ...,\n", + " 8.84886977e-05, 3.21889500e-04, 2.94925272e-03],\n", + " [1.35905389e-03, 1.61183911e-04, 6.91879104e-05, ...,\n", + " 2.89193413e-04, 6.93224138e-04, 4.70515573e-03],\n", + " [9.65875201e-03, 1.88575720e-03, 1.10870227e-03, ...,\n", + " 2.58681760e-03, 3.97690758e-03, 1.58278439e-02]],\n", + "\n", + " [[3.34995706e-03, 7.49400002e-04, 4.44106583e-04, ...,\n", + " 4.23803140e-04, 1.16107799e-03, 7.32442131e-03],\n", + " [4.63154254e-04, 1.31924564e-04, 5.75331833e-05, ...,\n", + " 2.48063916e-05, 1.31788765e-04, 1.28855614e-03],\n", + " [1.84829667e-04, 4.22494304e-05, 1.93375345e-05, ...,\n", + " 3.12912471e-06, 2.20445181e-05, 3.40537110e-04],\n", + " ...,\n", + " [1.03433682e-04, 4.99963107e-06, 7.01059491e-07, ...,\n", + " 5.13319992e-06, 4.15788127e-05, 5.11675724e-04],\n", + " [2.40331719e-04, 2.02179817e-05, 4.81432517e-06, ...,\n", + " 2.36316791e-05, 1.03607512e-04, 9.28838330e-04],\n", + " [2.54508085e-03, 3.50280927e-04, 1.46785387e-04, ...,\n", + " 3.32942145e-04, 8.13076796e-04, 4.12234711e-03]],\n", + "\n", + " [[1.35694118e-03, 1.68786763e-04, 7.48198290e-05, ...,\n", + " 2.81019224e-04, 7.46771577e-04, 5.51791256e-03],\n", + " [1.08341432e-04, 1.00760262e-05, 2.75844582e-06, ...,\n", + " 8.99239421e-06, 5.42080925e-05, 7.20382726e-04],\n", + " [3.48077228e-05, 2.02582328e-06, 5.01538182e-07, ...,\n", + " 5.80834751e-07, 4.99987436e-06, 1.27662483e-04],\n", + " ...,\n", + " [1.09947527e-04, 5.69427766e-06, 8.12078270e-07, ...,\n", + " 1.42909403e-06, 1.42246154e-05, 2.88977870e-04],\n", + " [2.98362691e-04, 3.16787191e-05, 7.62458285e-06, ...,\n", + " 5.10403333e-06, 3.06438706e-05, 4.58879425e-04],\n", + " [2.99305934e-03, 5.03332238e-04, 2.14670028e-04, ...,\n", + " 1.38354051e-04, 4.05884377e-04, 2.70675565e-03]],\n", + "\n", + " ...,\n", + "\n", + " [[3.29867308e-03, 4.56455105e-04, 1.34146132e-04, ...,\n", + " 9.69568064e-05, 3.37075238e-04, 2.77165999e-03],\n", + " [4.24169732e-04, 3.51630406e-05, 5.06967717e-06, ...,\n", + " 5.14997964e-06, 3.60815611e-05, 4.70222265e-04],\n", + " [9.61356855e-05, 5.18314391e-06, 7.80053995e-07, ...,\n", + " 2.31303488e-06, 1.90853134e-05, 3.29483621e-04],\n", + " ...,\n", + " [8.78510036e-06, 3.06934055e-07, 1.15704395e-07, ...,\n", + " 4.02947308e-07, 
3.91793219e-06, 1.21722005e-04],\n", + " [3.12283155e-05, 1.88902447e-06, 9.24211236e-07, ...,\n", + " 1.67769019e-06, 1.39318608e-05, 3.06272268e-04],\n", + " [9.07270878e-04, 9.94062648e-05, 5.61416928e-05, ...,\n", + " 5.73289071e-05, 2.48869706e-04, 2.29152758e-03]],\n", + "\n", + " [[5.91580058e-03, 1.10559561e-03, 3.32048250e-04, ...,\n", + " 2.32103208e-04, 6.34685624e-04, 4.07362822e-03],\n", + " [9.84633924e-04, 1.30795612e-04, 2.20851525e-05, ...,\n", + " 1.67520193e-05, 1.00321871e-04, 8.62589630e-04],\n", + " [2.57799606e-04, 2.33190221e-05, 3.75139280e-06, ...,\n", + " 7.50424260e-06, 5.11594008e-05, 5.78730425e-04],\n", + " ...,\n", + " [1.69782816e-05, 6.66840947e-07, 1.38846460e-07, ...,\n", + " 1.80018546e-07, 2.14967281e-06, 9.65387953e-05],\n", + " [5.67699681e-05, 3.36161565e-06, 1.02050706e-06, ...,\n", + " 1.21392122e-06, 1.14076483e-05, 2.88846204e-04],\n", + " [1.40678498e-03, 1.51812113e-04, 6.07411748e-05, ...,\n", + " 5.12399092e-05, 2.33667044e-04, 2.21933913e-03]],\n", + "\n", + " [[2.29063556e-02, 6.30153902e-03, 2.76722992e-03, ...,\n", + " 2.20514974e-03, 4.18286538e-03, 1.75831020e-02],\n", + " [4.68514580e-03, 7.92992418e-04, 3.06714675e-04, ...,\n", + " 2.61121342e-04, 7.31239910e-04, 4.39845258e-03],\n", + " [1.91156555e-03, 2.52487953e-04, 1.00966608e-04, ...,\n", + " 1.51911445e-04, 4.64357348e-04, 3.32914991e-03],\n", + " ...,\n", + " [7.02249992e-04, 5.80488013e-05, 1.94775675e-05, ...,\n", + " 8.09186895e-06, 4.56816706e-05, 9.44769825e-04],\n", + " [1.51418359e-03, 1.61546574e-04, 7.56840236e-05, ...,\n", + " 3.58892830e-05, 1.60053183e-04, 2.05396442e-03],\n", + " [1.25697535e-02, 2.19693198e-03, 1.26698730e-03, ...,\n", + " 6.89297158e-04, 1.90667005e-03, 1.04174931e-02]]]]],\n", + " dtype=float32)" + ] + }, + "execution_count": 24, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "out.members['output0'].data.to_numpy()" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'output0': }" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "out.members." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[[ 2, 3, 0, ..., 1, 3, 0],\n", + " [ 0, 0, 0, ..., 0, 1, 0],\n", + " [ 3, 3, 2, ..., 0, 1, 0],\n", + " ...,\n", + " [ 3, 4, 2, ..., 1, 2, 1],\n", + " [ 4, 1, 2, ..., 6, 5, 3],\n", + " [ 3, 2, 2, ..., 4, 5, 1]],\n", + "\n", + " [[ 2, 2, 4, ..., 5, 8, 9],\n", + " [ 2, 1, 6, ..., 1, 0, 2],\n", + " [ 5, 3, 0, ..., 3, 6, 5],\n", + " ...,\n", + " [ 1, 5, 8, ..., 2, 1, 1],\n", + " [ 3, 3, 6, ..., 2, 3, 3],\n", + " [ 0, 0, 0, ..., 3, 2, 2]],\n", + "\n", + " [[ 1, 5, 3, ..., 1, 3, 1],\n", + " [ 5, 3, 8, ..., 2, 5, 6],\n", + " [ 1, 2, 5, ..., 2, 1, 3],\n", + " ...,\n", + " [ 0, 0, 3, ..., 3, 2, 3],\n", + " [ 5, 2, 2, ..., 2, 2, 1],\n", + " [ 6, 2, 3, ..., 8, 2, 6]],\n", + "\n", + " ...,\n", + "\n", + " [[ 3, 7, 6, ..., 2, 9, 7],\n", + " [ 5, 9, 8, ..., 2, 2, 5],\n", + " [ 4, 1, 2, ..., 6, 6, 3],\n", + " ...,\n", + " [ 0, 3, 6, ..., 2, 3, 3],\n", + " [ 2, 1, 3, ..., 0, 3, 2],\n", + " [ 5, 5, 1, ..., 4, 1, 5]],\n", + "\n", + " [[ 9, 6, 5, ..., 2, 3, 3],\n", + " [ 6, 4, 3, ..., 4, 2, 0],\n", + " [ 3, 4, 6, ..., 3, 5, 3],\n", + " ...,\n", + " [ 4, 2, 4, ..., 2, 6, 4],\n", + " [ 0, 0, 4, ..., 0, 2, 2],\n", + " [26, 0, 0, ..., 2, 2, 2]],\n", + "\n", + " [[ 4, 2, 4, ..., 0, 1, 1],\n", + " [ 1, 1, 1, ..., 0, 2, 1],\n", + " [ 2, 3, 4, ..., 6, 1, 2],\n", + " ...,\n", + " [ 2, 1, 2, ..., 2, 1, 2],\n", + " [ 4, 6, 3, ..., 1, 1, 2],\n", + " [ 1, 1, 2, ..., 1, 0, 0]]], dtype=uint16)" + ] + }, + "execution_count": 34, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "raw" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "from plantseg.io import create_tiff\n", + "from plantseg.io.voxelsize import VoxelSize" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "create_tiff(Path('/Users/qin/Documents/Work/small_3D_crop.tif'), raw, VoxelSize(voxels_size=(1, 1, 1)))" + ] + }, + { + "cell_type": "code", + "execution_count": 42, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "VoxelSize(voxels_size=None, unit='um')" + ] + }, + "execution_count": 42, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "VoxelSize()" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "from bioimageio.spec import load_model_description\n", + "from bioimageio.core.axis import AxisId" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[32m2024-12-16 14:36:24.489\u001b[0m | \u001b[1mINFO \u001b[0m | \u001b[36mbioimageio.spec._internal.io_utils\u001b[0m:\u001b[36mopen_bioimageio_yaml\u001b[0m:\u001b[36m131\u001b[0m - \u001b[1mloading philosophical-panda from https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/philosophical-panda/0.0.11/files/rdf.yaml\u001b[0m\n", + "computing SHA256 of 101853864f8c8e986b2819c9ac44d0f9-test_input.npy (result: 6810255f5b5260fe39153f2192bedf30d9899ec4e770976b7813116c467579f0): 100%|██████████| 5529728/5529728 [00:00<00:00, 1344776498.48it/s]\n", + "computing SHA256 of 57b964a123db3ff8400bcce3ef902b18-test_output.npy (result: d802e3024da80bff93a9ec50fbe50b9c3946534aab1b60b911511111a8e2dbca): 100%|██████████| 8294528/8294528 [00:00<00:00, 1573628187.47it/s]\n", + "computing SHA256 of 
0b712fa8abf8707021a71747e726bf7c-test_style.npy (result: ab464b406f9050561b40f7d76700ab5edf3aca97e31fe9a6069a51aeeca8bc81): 100%|██████████| 76928/76928 [00:00<00:00, 213681733.85it/s]\n", + "computing SHA256 of d0abd68ef3844b0d6fbed811e0fed878-test_downsampled_0.npy (result: 67df53fb440e94dbb9c8e4003dcbde158646a7975c4878cacdd251e1fcfb4225): 100%|██████████| 88473728/88473728 [00:00<00:00, 2695492167.77it/s]\n", + "computing SHA256 of 3765cac1d92a49daf0d6ec949919aeb1-test_downsampled_1.npy (result: cb4addbd763d96731ebd18ed001b87ab7195ec9198f01a753a363a06c27bfb1c): 100%|██████████| 44236928/44236928 [00:00<00:00, 2611701702.60it/s]\n", + "computing SHA256 of 7251078a2afa8713384a3103878dd09d-test_downsampled_2.npy (result: 9c0225b94d84fcc3adfb9a73eef1303d6adb318b57a5a801e0e2e1638b458e72): 100%|██████████| 22118528/22118528 [00:00<00:00, 2409657934.14it/s]\n", + "computing SHA256 of e434cdc3ea3e7ecfb752cdc001617875-test_downsampled_3.npy (result: 1ea789ff37d47197c847b585799f7d063e7592b0c5e9c3094fd0e3ac209b7fc2): 100%|██████████| 11059328/11059328 [00:00<00:00, 2449888225.82it/s]\n", + "computing SHA256 of 00bd170d6c9de6a391d6869f59058847-cpnet_wrapper.py (result: b8b947cdd0ea8f5b98bd7be5f12f38bb1ea1ebe0b455c62d9a6389cd21d134bf): 100%|██████████| 11053/11053 [00:00<00:00, 34315057.08it/s]\n", + "computing SHA256 of 8dbb20d5a3cb3a3dfdb5101a671861ce-cp_state_dict_1135_gold.pth (result: 26c277f3b8f6ca5aab30b4b0a832601aea60183cbed1c2333576f4135a643eb2): 100%|██████████| 26556687/26556687 [00:00<00:00, 2154733982.88it/s]\n", + "computing SHA256 of 17fee110c39ccad7c3cb36d00d2fdd2c-cp_traced_1135_gold.pt (result: f61bae146ab522902350eadda1d509ac1037726fe6d7fb63f6a8a314021d63e7): 100%|██████████| 26812339/26812339 [00:00<00:00, 2652775239.24it/s]\n" + ] + } + ], + "source": [ + "model_id = 'philosophical-panda'\n", + "# model_id = 'emotional-cricket'\n", + "model = load_model_description(model_id)" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/Users/qin/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/bioimageio/spec/_internal/io.py:351: UserWarning: dumping with mode='python' is currently not fully supported for fields that are included when packaging; returned objects are standard python objects\n", + " warnings.warn(\n", + "/Users/qin/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/pydantic/_internal/_serializers.py:42: UserWarning: Pydantic serializer warnings:\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", + " v = handler(item, index)\n", + "/Users/qin/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/pydantic/_internal/_serializers.py:42: UserWarning: Pydantic serializer warnings:\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as 
expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", + " v = handler(item, index)\n", + "/Users/qin/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/pydantic/_internal/_serializers.py:42: UserWarning: Pydantic serializer warnings:\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", + " v = handler(item, index)\n", + "/Users/qin/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/pydantic/_internal/_serializers.py:42: UserWarning: Pydantic serializer warnings:\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=2.0)` - serialized value may not be as expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=2.0)` - serialized value may not be as expected\n", + " v = handler(item, index)\n", + "/Users/qin/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/pydantic/_internal/_serializers.py:42: UserWarning: Pydantic serializer warnings:\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", + " 
PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=2.0)` - serialized value may not be as expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=2.0)` - serialized value may not be as expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=2.0)` - serialized value may not be as expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=2.0)` - serialized value may not be as expected\n", + " v = handler(item, index)\n", + "/Users/qin/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/pydantic/_internal/_serializers.py:42: UserWarning: Pydantic serializer warnings:\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=4.0)` - serialized value may not be as expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=4.0)` - serialized value may not be as expected\n", + " v = handler(item, index)\n", + "/Users/qin/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/pydantic/_internal/_serializers.py:42: UserWarning: Pydantic serializer warnings:\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=4.0)` - serialized value may not be as expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=4.0)` - serialized value may not be as expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=4.0)` - serialized value may not be as expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=4.0)` - serialized value may not be as expected\n", + " v = handler(item, index)\n", + "/Users/qin/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/pydantic/_internal/_serializers.py:42: UserWarning: Pydantic serializer warnings:\n", + " PydanticSerializationUnexpectedValue: 
Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=8.0)` - serialized value may not be as expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=8.0)` - serialized value may not be as expected\n", + " v = handler(item, index)\n", + "/Users/qin/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/pydantic/_internal/_serializers.py:42: UserWarning: Pydantic serializer warnings:\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=8.0)` - serialized value may not be as expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=8.0)` - serialized value may not be as expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=8.0)` - serialized value may not be as expected\n", + " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=8.0)` - serialized value may not be as expected\n", + " v = handler(item, index)\n" + ] + }, + { + "data": { + "text/plain": [ + "{'name': 'Cellpose Plant Nuclei ResNet',\n", + " 'description': 'An experimental Cellpose nuclear model fine-tuned on ovules 1136, 1137, 1139, 1170 and tested on ovules 1135 (see reference for dataset details). 
A model for BioImage.IO team to test and develop post-processing tools.',\n", + " 'covers': [PosixPath('cellpose_raw_and_segmentation.jpg'),\n", + " PosixPath('cellpose_raw_and_probability.jpg'),\n", + " PosixPath('cellpose_raw.jpg')],\n", + " 'id_emoji': '🐼',\n", + " 'authors': [{'affiliation': 'EMBL',\n", + " 'email': None,\n", + " 'orcid': '0000-0002-4652-0795',\n", + " 'name': 'Qin Yu',\n", + " 'github_user': 'qin-yu'}],\n", + " 'attachments': [],\n", + " 'cite': [{'text': 'For more details of the model itself, see the manuscript',\n", + " 'doi': '10.1101/2024.02.19.580954',\n", + " 'url': None}],\n", + " 'license': 'MIT',\n", + " 'config': {'bioimageio': {'thumbnails': {'cellpose_raw.jpg': 'cellpose_raw.thumbnail.png',\n", + " 'cellpose_raw_and_probability.jpg': 'cellpose_raw_and_probability.thumbnail.png',\n", + " 'cellpose_raw_and_segmentation.jpg': 'cellpose_raw_and_segmentation.thumbnail.png'}}},\n", + " 'git_repo': 'https://github.com/kreshuklab/go-nuclear',\n", + " 'icon': None,\n", + " 'links': [],\n", + " 'uploader': {'email': 'qin.yu.95@outlook.com', 'name': 'Qin Yu'},\n", + " 'maintainers': [],\n", + " 'tags': ['cellpose', '3d', '2d', 'nuclei'],\n", + " 'version': '0.0.11',\n", + " 'format_version': '0.5.3',\n", + " 'type': 'model',\n", + " 'id': 'philosophical-panda',\n", + " 'documentation': PosixPath('README.md'),\n", + " 'inputs': [{'id': 'raw',\n", + " 'description': '',\n", + " 'axes': [{'size': {'min': 1, 'step': 1},\n", + " 'id': 'z',\n", + " 'description': '',\n", + " 'type': 'space',\n", + " 'unit': None,\n", + " 'scale': 1.0,\n", + " 'concatenable': False},\n", + " {'id': 'channel',\n", + " 'description': '',\n", + " 'type': 'channel',\n", + " 'channel_names': ['c1', 'c2']},\n", + " {'size': {'min': 16, 'step': 16},\n", + " 'id': 'y',\n", + " 'description': '',\n", + " 'type': 'space',\n", + " 'unit': None,\n", + " 'scale': 1.0,\n", + " 'concatenable': False},\n", + " {'size': {'min': 16, 'step': 16},\n", + " 'id': 'x',\n", + " 'description': '',\n", + " 'type': 'space',\n", + " 'unit': None,\n", + " 'scale': 1.0,\n", + " 'concatenable': False}],\n", + " 'test_tensor': {'source': PosixPath('test_input.npy'),\n", + " 'sha256': '6810255f5b5260fe39153f2192bedf30d9899ec4e770976b7813116c467579f0'},\n", + " 'sample_tensor': None,\n", + " 'data': {'type': 'float32',\n", + " 'range': (None, None),\n", + " 'unit': 'arbitrary unit',\n", + " 'scale': 1.0,\n", + " 'offset': None},\n", + " 'optional': False,\n", + " 'preprocessing': [{'id': 'ensure_dtype', 'kwargs': {'dtype': 'float32'}}]}],\n", + " 'outputs': [{'id': 'flow',\n", + " 'description': '',\n", + " 'axes': [{'size': {'tensor_id': 'raw', 'axis_id': 'z', 'offset': 0},\n", + " 'id': 'z',\n", + " 'description': '',\n", + " 'type': 'space',\n", + " 'unit': None,\n", + " 'scale': 1.0},\n", + " {'id': 'channel',\n", + " 'description': '',\n", + " 'type': 'channel',\n", + " 'channel_names': ['flow1', 'flow2', 'flow3']},\n", + " {'size': {'tensor_id': 'raw', 'axis_id': 'y', 'offset': 0},\n", + " 'id': 'y',\n", + " 'description': '',\n", + " 'type': 'space',\n", + " 'unit': None,\n", + " 'scale': 1.0},\n", + " {'size': {'tensor_id': 'raw', 'axis_id': 'x', 'offset': 0},\n", + " 'id': 'x',\n", + " 'description': '',\n", + " 'type': 'space',\n", + " 'unit': None,\n", + " 'scale': 1.0}],\n", + " 'test_tensor': {'source': PosixPath('test_output.npy'),\n", + " 'sha256': 'd802e3024da80bff93a9ec50fbe50b9c3946534aab1b60b911511111a8e2dbca'},\n", + " 'sample_tensor': None,\n", + " 'data': {'type': 'float32',\n", + " 'range': (None, 
None),\n", + " 'unit': 'arbitrary unit',\n", + " 'scale': 1.0,\n", + " 'offset': None},\n", + " 'postprocessing': [{'id': 'ensure_dtype', 'kwargs': {'dtype': 'float32'}}]},\n", + " {'id': 'style',\n", + " 'description': '',\n", + " 'axes': [{'size': {'tensor_id': 'raw', 'axis_id': 'z', 'offset': 0},\n", + " 'id': 'z',\n", + " 'description': '',\n", + " 'type': 'space',\n", + " 'unit': None,\n", + " 'scale': 1.0},\n", + " {'id': 'channel',\n", + " 'description': '',\n", + " 'type': 'channel',\n", + " 'channel_names': ['feature1',\n", + " 'feature2',\n", + " 'feature3',\n", + " 'feature4',\n", + " 'feature5',\n", + " 'feature6',\n", + " 'feature7',\n", + " 'feature8',\n", + " 'feature9',\n", + " 'feature10',\n", + " 'feature11',\n", + " 'feature12',\n", + " 'feature13',\n", + " 'feature14',\n", + " 'feature15',\n", + " 'feature16',\n", + " 'feature17',\n", + " 'feature18',\n", + " 'feature19',\n", + " 'feature20',\n", + " 'feature21',\n", + " 'feature22',\n", + " 'feature23',\n", + " 'feature24',\n", + " 'feature25',\n", + " 'feature26',\n", + " 'feature27',\n", + " 'feature28',\n", + " 'feature29',\n", + " 'feature30',\n", + " 'feature31',\n", + " 'feature32',\n", + " 'feature33',\n", + " 'feature34',\n", + " 'feature35',\n", + " 'feature36',\n", + " 'feature37',\n", + " 'feature38',\n", + " 'feature39',\n", + " 'feature40',\n", + " 'feature41',\n", + " 'feature42',\n", + " 'feature43',\n", + " 'feature44',\n", + " 'feature45',\n", + " 'feature46',\n", + " 'feature47',\n", + " 'feature48',\n", + " 'feature49',\n", + " 'feature50',\n", + " 'feature51',\n", + " 'feature52',\n", + " 'feature53',\n", + " 'feature54',\n", + " 'feature55',\n", + " 'feature56',\n", + " 'feature57',\n", + " 'feature58',\n", + " 'feature59',\n", + " 'feature60',\n", + " 'feature61',\n", + " 'feature62',\n", + " 'feature63',\n", + " 'feature64',\n", + " 'feature65',\n", + " 'feature66',\n", + " 'feature67',\n", + " 'feature68',\n", + " 'feature69',\n", + " 'feature70',\n", + " 'feature71',\n", + " 'feature72',\n", + " 'feature73',\n", + " 'feature74',\n", + " 'feature75',\n", + " 'feature76',\n", + " 'feature77',\n", + " 'feature78',\n", + " 'feature79',\n", + " 'feature80',\n", + " 'feature81',\n", + " 'feature82',\n", + " 'feature83',\n", + " 'feature84',\n", + " 'feature85',\n", + " 'feature86',\n", + " 'feature87',\n", + " 'feature88',\n", + " 'feature89',\n", + " 'feature90',\n", + " 'feature91',\n", + " 'feature92',\n", + " 'feature93',\n", + " 'feature94',\n", + " 'feature95',\n", + " 'feature96',\n", + " 'feature97',\n", + " 'feature98',\n", + " 'feature99',\n", + " 'feature100',\n", + " 'feature101',\n", + " 'feature102',\n", + " 'feature103',\n", + " 'feature104',\n", + " 'feature105',\n", + " 'feature106',\n", + " 'feature107',\n", + " 'feature108',\n", + " 'feature109',\n", + " 'feature110',\n", + " 'feature111',\n", + " 'feature112',\n", + " 'feature113',\n", + " 'feature114',\n", + " 'feature115',\n", + " 'feature116',\n", + " 'feature117',\n", + " 'feature118',\n", + " 'feature119',\n", + " 'feature120',\n", + " 'feature121',\n", + " 'feature122',\n", + " 'feature123',\n", + " 'feature124',\n", + " 'feature125',\n", + " 'feature126',\n", + " 'feature127',\n", + " 'feature128',\n", + " 'feature129',\n", + " 'feature130',\n", + " 'feature131',\n", + " 'feature132',\n", + " 'feature133',\n", + " 'feature134',\n", + " 'feature135',\n", + " 'feature136',\n", + " 'feature137',\n", + " 'feature138',\n", + " 'feature139',\n", + " 'feature140',\n", + " 'feature141',\n", + " 'feature142',\n", + " 
'feature143',\n", + " 'feature144',\n", + " 'feature145',\n", + " 'feature146',\n", + " 'feature147',\n", + " 'feature148',\n", + " 'feature149',\n", + " 'feature150',\n", + " 'feature151',\n", + " 'feature152',\n", + " 'feature153',\n", + " 'feature154',\n", + " 'feature155',\n", + " 'feature156',\n", + " 'feature157',\n", + " 'feature158',\n", + " 'feature159',\n", + " 'feature160',\n", + " 'feature161',\n", + " 'feature162',\n", + " 'feature163',\n", + " 'feature164',\n", + " 'feature165',\n", + " 'feature166',\n", + " 'feature167',\n", + " 'feature168',\n", + " 'feature169',\n", + " 'feature170',\n", + " 'feature171',\n", + " 'feature172',\n", + " 'feature173',\n", + " 'feature174',\n", + " 'feature175',\n", + " 'feature176',\n", + " 'feature177',\n", + " 'feature178',\n", + " 'feature179',\n", + " 'feature180',\n", + " 'feature181',\n", + " 'feature182',\n", + " 'feature183',\n", + " 'feature184',\n", + " 'feature185',\n", + " 'feature186',\n", + " 'feature187',\n", + " 'feature188',\n", + " 'feature189',\n", + " 'feature190',\n", + " 'feature191',\n", + " 'feature192',\n", + " 'feature193',\n", + " 'feature194',\n", + " 'feature195',\n", + " 'feature196',\n", + " 'feature197',\n", + " 'feature198',\n", + " 'feature199',\n", + " 'feature200',\n", + " 'feature201',\n", + " 'feature202',\n", + " 'feature203',\n", + " 'feature204',\n", + " 'feature205',\n", + " 'feature206',\n", + " 'feature207',\n", + " 'feature208',\n", + " 'feature209',\n", + " 'feature210',\n", + " 'feature211',\n", + " 'feature212',\n", + " 'feature213',\n", + " 'feature214',\n", + " 'feature215',\n", + " 'feature216',\n", + " 'feature217',\n", + " 'feature218',\n", + " 'feature219',\n", + " 'feature220',\n", + " 'feature221',\n", + " 'feature222',\n", + " 'feature223',\n", + " 'feature224',\n", + " 'feature225',\n", + " 'feature226',\n", + " 'feature227',\n", + " 'feature228',\n", + " 'feature229',\n", + " 'feature230',\n", + " 'feature231',\n", + " 'feature232',\n", + " 'feature233',\n", + " 'feature234',\n", + " 'feature235',\n", + " 'feature236',\n", + " 'feature237',\n", + " 'feature238',\n", + " 'feature239',\n", + " 'feature240',\n", + " 'feature241',\n", + " 'feature242',\n", + " 'feature243',\n", + " 'feature244',\n", + " 'feature245',\n", + " 'feature246',\n", + " 'feature247',\n", + " 'feature248',\n", + " 'feature249',\n", + " 'feature250',\n", + " 'feature251',\n", + " 'feature252',\n", + " 'feature253',\n", + " 'feature254',\n", + " 'feature255',\n", + " 'feature256']}],\n", + " 'test_tensor': {'source': PosixPath('test_style.npy'),\n", + " 'sha256': 'ab464b406f9050561b40f7d76700ab5edf3aca97e31fe9a6069a51aeeca8bc81'},\n", + " 'sample_tensor': None,\n", + " 'data': {'type': 'float32',\n", + " 'range': (None, None),\n", + " 'unit': 'arbitrary unit',\n", + " 'scale': 1.0,\n", + " 'offset': None},\n", + " 'postprocessing': [{'id': 'ensure_dtype', 'kwargs': {'dtype': 'float32'}}]},\n", + " {'id': 'downsampled_0',\n", + " 'description': '',\n", + " 'axes': [{'size': {'tensor_id': 'raw', 'axis_id': 'z', 'offset': 0},\n", + " 'id': 'z',\n", + " 'description': '',\n", + " 'type': 'space',\n", + " 'unit': None,\n", + " 'scale': 1.0},\n", + " {'id': 'channel',\n", + " 'description': '',\n", + " 'type': 'channel',\n", + " 'channel_names': ['feature1',\n", + " 'feature2',\n", + " 'feature3',\n", + " 'feature4',\n", + " 'feature5',\n", + " 'feature6',\n", + " 'feature7',\n", + " 'feature8',\n", + " 'feature9',\n", + " 'feature10',\n", + " 'feature11',\n", + " 'feature12',\n", + " 'feature13',\n", + " 
'feature14',\n", + " 'feature15',\n", + " 'feature16',\n", + " 'feature17',\n", + " 'feature18',\n", + " 'feature19',\n", + " 'feature20',\n", + " 'feature21',\n", + " 'feature22',\n", + " 'feature23',\n", + " 'feature24',\n", + " 'feature25',\n", + " 'feature26',\n", + " 'feature27',\n", + " 'feature28',\n", + " 'feature29',\n", + " 'feature30',\n", + " 'feature31',\n", + " 'feature32']},\n", + " {'size': {'tensor_id': 'raw', 'axis_id': 'y', 'offset': 0},\n", + " 'id': 'y',\n", + " 'description': '',\n", + " 'type': 'space',\n", + " 'unit': None,\n", + " 'scale': 1.0},\n", + " {'size': {'tensor_id': 'raw', 'axis_id': 'x', 'offset': 0},\n", + " 'id': 'x',\n", + " 'description': '',\n", + " 'type': 'space',\n", + " 'unit': None,\n", + " 'scale': 1.0}],\n", + " 'test_tensor': {'source': PosixPath('test_downsampled_0.npy'),\n", + " 'sha256': '67df53fb440e94dbb9c8e4003dcbde158646a7975c4878cacdd251e1fcfb4225'},\n", + " 'sample_tensor': None,\n", + " 'data': {'type': 'float32',\n", + " 'range': (None, None),\n", + " 'unit': 'arbitrary unit',\n", + " 'scale': 1.0,\n", + " 'offset': None},\n", + " 'postprocessing': [{'id': 'ensure_dtype', 'kwargs': {'dtype': 'float32'}}]},\n", + " {'id': 'downsampled_1',\n", + " 'description': '',\n", + " 'axes': [{'size': {'tensor_id': 'raw', 'axis_id': 'z', 'offset': 0},\n", + " 'id': 'z',\n", + " 'description': '',\n", + " 'type': 'space',\n", + " 'unit': None,\n", + " 'scale': 1.0},\n", + " {'id': 'channel',\n", + " 'description': '',\n", + " 'type': 'channel',\n", + " 'channel_names': ['feature1',\n", + " 'feature2',\n", + " 'feature3',\n", + " 'feature4',\n", + " 'feature5',\n", + " 'feature6',\n", + " 'feature7',\n", + " 'feature8',\n", + " 'feature9',\n", + " 'feature10',\n", + " 'feature11',\n", + " 'feature12',\n", + " 'feature13',\n", + " 'feature14',\n", + " 'feature15',\n", + " 'feature16',\n", + " 'feature17',\n", + " 'feature18',\n", + " 'feature19',\n", + " 'feature20',\n", + " 'feature21',\n", + " 'feature22',\n", + " 'feature23',\n", + " 'feature24',\n", + " 'feature25',\n", + " 'feature26',\n", + " 'feature27',\n", + " 'feature28',\n", + " 'feature29',\n", + " 'feature30',\n", + " 'feature31',\n", + " 'feature32',\n", + " 'feature33',\n", + " 'feature34',\n", + " 'feature35',\n", + " 'feature36',\n", + " 'feature37',\n", + " 'feature38',\n", + " 'feature39',\n", + " 'feature40',\n", + " 'feature41',\n", + " 'feature42',\n", + " 'feature43',\n", + " 'feature44',\n", + " 'feature45',\n", + " 'feature46',\n", + " 'feature47',\n", + " 'feature48',\n", + " 'feature49',\n", + " 'feature50',\n", + " 'feature51',\n", + " 'feature52',\n", + " 'feature53',\n", + " 'feature54',\n", + " 'feature55',\n", + " 'feature56',\n", + " 'feature57',\n", + " 'feature58',\n", + " 'feature59',\n", + " 'feature60',\n", + " 'feature61',\n", + " 'feature62',\n", + " 'feature63',\n", + " 'feature64']},\n", + " {'size': {'tensor_id': 'raw', 'axis_id': 'y', 'offset': 0},\n", + " 'id': 'y',\n", + " 'description': '',\n", + " 'type': 'space',\n", + " 'unit': None,\n", + " 'scale': 2.0},\n", + " {'size': {'tensor_id': 'raw', 'axis_id': 'x', 'offset': 0},\n", + " 'id': 'x',\n", + " 'description': '',\n", + " 'type': 'space',\n", + " 'unit': None,\n", + " 'scale': 2.0}],\n", + " 'test_tensor': {'source': PosixPath('test_downsampled_1.npy'),\n", + " 'sha256': 'cb4addbd763d96731ebd18ed001b87ab7195ec9198f01a753a363a06c27bfb1c'},\n", + " 'sample_tensor': None,\n", + " 'data': {'type': 'float32',\n", + " 'range': (None, None),\n", + " 'unit': 'arbitrary unit',\n", + " 'scale': 
1.0,\n", + " 'offset': None},\n", + " 'postprocessing': [{'id': 'ensure_dtype', 'kwargs': {'dtype': 'float32'}}]},\n", + " {'id': 'downsampled_2',\n", + " 'description': '',\n", + " 'axes': [{'size': {'tensor_id': 'raw', 'axis_id': 'z', 'offset': 0},\n", + " 'id': 'z',\n", + " 'description': '',\n", + " 'type': 'space',\n", + " 'unit': None,\n", + " 'scale': 1.0},\n", + " {'id': 'channel',\n", + " 'description': '',\n", + " 'type': 'channel',\n", + " 'channel_names': ['feature1',\n", + " 'feature2',\n", + " 'feature3',\n", + " 'feature4',\n", + " 'feature5',\n", + " 'feature6',\n", + " 'feature7',\n", + " 'feature8',\n", + " 'feature9',\n", + " 'feature10',\n", + " 'feature11',\n", + " 'feature12',\n", + " 'feature13',\n", + " 'feature14',\n", + " 'feature15',\n", + " 'feature16',\n", + " 'feature17',\n", + " 'feature18',\n", + " 'feature19',\n", + " 'feature20',\n", + " 'feature21',\n", + " 'feature22',\n", + " 'feature23',\n", + " 'feature24',\n", + " 'feature25',\n", + " 'feature26',\n", + " 'feature27',\n", + " 'feature28',\n", + " 'feature29',\n", + " 'feature30',\n", + " 'feature31',\n", + " 'feature32',\n", + " 'feature33',\n", + " 'feature34',\n", + " 'feature35',\n", + " 'feature36',\n", + " 'feature37',\n", + " 'feature38',\n", + " 'feature39',\n", + " 'feature40',\n", + " 'feature41',\n", + " 'feature42',\n", + " 'feature43',\n", + " 'feature44',\n", + " 'feature45',\n", + " 'feature46',\n", + " 'feature47',\n", + " 'feature48',\n", + " 'feature49',\n", + " 'feature50',\n", + " 'feature51',\n", + " 'feature52',\n", + " 'feature53',\n", + " 'feature54',\n", + " 'feature55',\n", + " 'feature56',\n", + " 'feature57',\n", + " 'feature58',\n", + " 'feature59',\n", + " 'feature60',\n", + " 'feature61',\n", + " 'feature62',\n", + " 'feature63',\n", + " 'feature64',\n", + " 'feature65',\n", + " 'feature66',\n", + " 'feature67',\n", + " 'feature68',\n", + " 'feature69',\n", + " 'feature70',\n", + " 'feature71',\n", + " 'feature72',\n", + " 'feature73',\n", + " 'feature74',\n", + " 'feature75',\n", + " 'feature76',\n", + " 'feature77',\n", + " 'feature78',\n", + " 'feature79',\n", + " 'feature80',\n", + " 'feature81',\n", + " 'feature82',\n", + " 'feature83',\n", + " 'feature84',\n", + " 'feature85',\n", + " 'feature86',\n", + " 'feature87',\n", + " 'feature88',\n", + " 'feature89',\n", + " 'feature90',\n", + " 'feature91',\n", + " 'feature92',\n", + " 'feature93',\n", + " 'feature94',\n", + " 'feature95',\n", + " 'feature96',\n", + " 'feature97',\n", + " 'feature98',\n", + " 'feature99',\n", + " 'feature100',\n", + " 'feature101',\n", + " 'feature102',\n", + " 'feature103',\n", + " 'feature104',\n", + " 'feature105',\n", + " 'feature106',\n", + " 'feature107',\n", + " 'feature108',\n", + " 'feature109',\n", + " 'feature110',\n", + " 'feature111',\n", + " 'feature112',\n", + " 'feature113',\n", + " 'feature114',\n", + " 'feature115',\n", + " 'feature116',\n", + " 'feature117',\n", + " 'feature118',\n", + " 'feature119',\n", + " 'feature120',\n", + " 'feature121',\n", + " 'feature122',\n", + " 'feature123',\n", + " 'feature124',\n", + " 'feature125',\n", + " 'feature126',\n", + " 'feature127',\n", + " 'feature128']},\n", + " {'size': {'tensor_id': 'raw', 'axis_id': 'y', 'offset': 0},\n", + " 'id': 'y',\n", + " 'description': '',\n", + " 'type': 'space',\n", + " 'unit': None,\n", + " 'scale': 4.0},\n", + " {'size': {'tensor_id': 'raw', 'axis_id': 'x', 'offset': 0},\n", + " 'id': 'x',\n", + " 'description': '',\n", + " 'type': 'space',\n", + " 'unit': None,\n", + " 'scale': 4.0}],\n", + " 
'test_tensor': {'source': PosixPath('test_downsampled_2.npy'),\n", + " 'sha256': '9c0225b94d84fcc3adfb9a73eef1303d6adb318b57a5a801e0e2e1638b458e72'},\n", + " 'sample_tensor': None,\n", + " 'data': {'type': 'float32',\n", + " 'range': (None, None),\n", + " 'unit': 'arbitrary unit',\n", + " 'scale': 1.0,\n", + " 'offset': None},\n", + " 'postprocessing': [{'id': 'ensure_dtype', 'kwargs': {'dtype': 'float32'}}]},\n", + " {'id': 'downsampled_3',\n", + " 'description': '',\n", + " 'axes': [{'size': {'tensor_id': 'raw', 'axis_id': 'z', 'offset': 0},\n", + " 'id': 'z',\n", + " 'description': '',\n", + " 'type': 'space',\n", + " 'unit': None,\n", + " 'scale': 1.0},\n", + " {'id': 'channel',\n", + " 'description': '',\n", + " 'type': 'channel',\n", + " 'channel_names': ['feature1',\n", + " 'feature2',\n", + " 'feature3',\n", + " 'feature4',\n", + " 'feature5',\n", + " 'feature6',\n", + " 'feature7',\n", + " 'feature8',\n", + " 'feature9',\n", + " 'feature10',\n", + " 'feature11',\n", + " 'feature12',\n", + " 'feature13',\n", + " 'feature14',\n", + " 'feature15',\n", + " 'feature16',\n", + " 'feature17',\n", + " 'feature18',\n", + " 'feature19',\n", + " 'feature20',\n", + " 'feature21',\n", + " 'feature22',\n", + " 'feature23',\n", + " 'feature24',\n", + " 'feature25',\n", + " 'feature26',\n", + " 'feature27',\n", + " 'feature28',\n", + " 'feature29',\n", + " 'feature30',\n", + " 'feature31',\n", + " 'feature32',\n", + " 'feature33',\n", + " 'feature34',\n", + " 'feature35',\n", + " 'feature36',\n", + " 'feature37',\n", + " 'feature38',\n", + " 'feature39',\n", + " 'feature40',\n", + " 'feature41',\n", + " 'feature42',\n", + " 'feature43',\n", + " 'feature44',\n", + " 'feature45',\n", + " 'feature46',\n", + " 'feature47',\n", + " 'feature48',\n", + " 'feature49',\n", + " 'feature50',\n", + " 'feature51',\n", + " 'feature52',\n", + " 'feature53',\n", + " 'feature54',\n", + " 'feature55',\n", + " 'feature56',\n", + " 'feature57',\n", + " 'feature58',\n", + " 'feature59',\n", + " 'feature60',\n", + " 'feature61',\n", + " 'feature62',\n", + " 'feature63',\n", + " 'feature64',\n", + " 'feature65',\n", + " 'feature66',\n", + " 'feature67',\n", + " 'feature68',\n", + " 'feature69',\n", + " 'feature70',\n", + " 'feature71',\n", + " 'feature72',\n", + " 'feature73',\n", + " 'feature74',\n", + " 'feature75',\n", + " 'feature76',\n", + " 'feature77',\n", + " 'feature78',\n", + " 'feature79',\n", + " 'feature80',\n", + " 'feature81',\n", + " 'feature82',\n", + " 'feature83',\n", + " 'feature84',\n", + " 'feature85',\n", + " 'feature86',\n", + " 'feature87',\n", + " 'feature88',\n", + " 'feature89',\n", + " 'feature90',\n", + " 'feature91',\n", + " 'feature92',\n", + " 'feature93',\n", + " 'feature94',\n", + " 'feature95',\n", + " 'feature96',\n", + " 'feature97',\n", + " 'feature98',\n", + " 'feature99',\n", + " 'feature100',\n", + " 'feature101',\n", + " 'feature102',\n", + " 'feature103',\n", + " 'feature104',\n", + " 'feature105',\n", + " 'feature106',\n", + " 'feature107',\n", + " 'feature108',\n", + " 'feature109',\n", + " 'feature110',\n", + " 'feature111',\n", + " 'feature112',\n", + " 'feature113',\n", + " 'feature114',\n", + " 'feature115',\n", + " 'feature116',\n", + " 'feature117',\n", + " 'feature118',\n", + " 'feature119',\n", + " 'feature120',\n", + " 'feature121',\n", + " 'feature122',\n", + " 'feature123',\n", + " 'feature124',\n", + " 'feature125',\n", + " 'feature126',\n", + " 'feature127',\n", + " 'feature128',\n", + " 'feature129',\n", + " 'feature130',\n", + " 'feature131',\n", + " 
'feature132',\n", + " 'feature133',\n", + " 'feature134',\n", + " 'feature135',\n", + " 'feature136',\n", + " 'feature137',\n", + " 'feature138',\n", + " 'feature139',\n", + " 'feature140',\n", + " 'feature141',\n", + " 'feature142',\n", + " 'feature143',\n", + " 'feature144',\n", + " 'feature145',\n", + " 'feature146',\n", + " 'feature147',\n", + " 'feature148',\n", + " 'feature149',\n", + " 'feature150',\n", + " 'feature151',\n", + " 'feature152',\n", + " 'feature153',\n", + " 'feature154',\n", + " 'feature155',\n", + " 'feature156',\n", + " 'feature157',\n", + " 'feature158',\n", + " 'feature159',\n", + " 'feature160',\n", + " 'feature161',\n", + " 'feature162',\n", + " 'feature163',\n", + " 'feature164',\n", + " 'feature165',\n", + " 'feature166',\n", + " 'feature167',\n", + " 'feature168',\n", + " 'feature169',\n", + " 'feature170',\n", + " 'feature171',\n", + " 'feature172',\n", + " 'feature173',\n", + " 'feature174',\n", + " 'feature175',\n", + " 'feature176',\n", + " 'feature177',\n", + " 'feature178',\n", + " 'feature179',\n", + " 'feature180',\n", + " 'feature181',\n", + " 'feature182',\n", + " 'feature183',\n", + " 'feature184',\n", + " 'feature185',\n", + " 'feature186',\n", + " 'feature187',\n", + " 'feature188',\n", + " 'feature189',\n", + " 'feature190',\n", + " 'feature191',\n", + " 'feature192',\n", + " 'feature193',\n", + " 'feature194',\n", + " 'feature195',\n", + " 'feature196',\n", + " 'feature197',\n", + " 'feature198',\n", + " 'feature199',\n", + " 'feature200',\n", + " 'feature201',\n", + " 'feature202',\n", + " 'feature203',\n", + " 'feature204',\n", + " 'feature205',\n", + " 'feature206',\n", + " 'feature207',\n", + " 'feature208',\n", + " 'feature209',\n", + " 'feature210',\n", + " 'feature211',\n", + " 'feature212',\n", + " 'feature213',\n", + " 'feature214',\n", + " 'feature215',\n", + " 'feature216',\n", + " 'feature217',\n", + " 'feature218',\n", + " 'feature219',\n", + " 'feature220',\n", + " 'feature221',\n", + " 'feature222',\n", + " 'feature223',\n", + " 'feature224',\n", + " 'feature225',\n", + " 'feature226',\n", + " 'feature227',\n", + " 'feature228',\n", + " 'feature229',\n", + " 'feature230',\n", + " 'feature231',\n", + " 'feature232',\n", + " 'feature233',\n", + " 'feature234',\n", + " 'feature235',\n", + " 'feature236',\n", + " 'feature237',\n", + " 'feature238',\n", + " 'feature239',\n", + " 'feature240',\n", + " 'feature241',\n", + " 'feature242',\n", + " 'feature243',\n", + " 'feature244',\n", + " 'feature245',\n", + " 'feature246',\n", + " 'feature247',\n", + " 'feature248',\n", + " 'feature249',\n", + " 'feature250',\n", + " 'feature251',\n", + " 'feature252',\n", + " 'feature253',\n", + " 'feature254',\n", + " 'feature255',\n", + " 'feature256']},\n", + " {'size': {'tensor_id': 'raw', 'axis_id': 'y', 'offset': 0},\n", + " 'id': 'y',\n", + " 'description': '',\n", + " 'type': 'space',\n", + " 'unit': None,\n", + " 'scale': 8.0},\n", + " {'size': {'tensor_id': 'raw', 'axis_id': 'x', 'offset': 0},\n", + " 'id': 'x',\n", + " 'description': '',\n", + " 'type': 'space',\n", + " 'unit': None,\n", + " 'scale': 8.0}],\n", + " 'test_tensor': {'source': PosixPath('test_downsampled_3.npy'),\n", + " 'sha256': '1ea789ff37d47197c847b585799f7d063e7592b0c5e9c3094fd0e3ac209b7fc2'},\n", + " 'sample_tensor': None,\n", + " 'data': {'type': 'float32',\n", + " 'range': (None, None),\n", + " 'unit': 'arbitrary unit',\n", + " 'scale': 1.0,\n", + " 'offset': None},\n", + " 'postprocessing': [{'id': 'ensure_dtype',\n", + " 'kwargs': {'dtype': 'float32'}}]}],\n", + " 
'packaged_by': [],\n", + " 'parent': None,\n", + " 'run_mode': None,\n", + " 'timestamp': datetime.datetime(2024, 12, 16, 13, 13, 2, 698812),\n", + " 'training_data': None,\n", + " 'weights': {'keras_hdf5': None,\n", + " 'onnx': None,\n", + " 'pytorch_state_dict': {'source': PosixPath('cp_state_dict_1135_gold.pth'),\n", + " 'sha256': '26c277f3b8f6ca5aab30b4b0a832601aea60183cbed1c2333576f4135a643eb2',\n", + " 'authors': None,\n", + " 'parent': None,\n", + " 'architecture': {'source': PosixPath('cpnet_wrapper.py'),\n", + " 'sha256': 'b8b947cdd0ea8f5b98bd7be5f12f38bb1ea1ebe0b455c62d9a6389cd21d134bf',\n", + " 'callable': 'CPnetBioImageIO',\n", + " 'kwargs': {'conv_3D': False,\n", + " 'max_pool': True,\n", + " 'mkldnn': False,\n", + " 'nbase': [2, 32, 64, 128, 256],\n", + " 'nout': 3,\n", + " 'sz': 3}},\n", + " 'pytorch_version': '2.3.1',\n", + " 'dependencies': None},\n", + " 'tensorflow_js': None,\n", + " 'tensorflow_saved_model_bundle': None,\n", + " 'torchscript': {'source': PosixPath('cp_traced_1135_gold.pt'),\n", + " 'sha256': 'f61bae146ab522902350eadda1d509ac1037726fe6d7fb63f6a8a314021d63e7',\n", + " 'authors': None,\n", + " 'parent': 'pytorch_state_dict',\n", + " 'pytorch_version': '2.3.1'}}}" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model.model_dump()" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['batch', 'channel', 'z', 'y', 'x']" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "[AxisId(a) if isinstance(a, str) else a.id for a in model.inputs[0].axes]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "{AxisId(a) if isinstance(a, str) else a.id : a.get('size', 1) for a in model.inputs[0].axes}" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "plant-seg-dev", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.7" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} From f43a5294afe36010f014ab49fef5958a395820e5 Mon Sep 17 00:00:00 2001 From: Qin Yu Date: Mon, 16 Dec 2024 19:36:10 +0100 Subject: [PATCH 18/32] feat: support both `blocksize_parameter` and `input_block_shape` for bioimageio.core blocked prediction --- plantseg/functionals/prediction/prediction.py | 63 ++++++++++++------- 1 file changed, 41 insertions(+), 22 deletions(-) diff --git a/plantseg/functionals/prediction/prediction.py b/plantseg/functionals/prediction/prediction.py index 6e1a1b9d..f6296ac0 100644 --- a/plantseg/functionals/prediction/prediction.py +++ b/plantseg/functionals/prediction/prediction.py @@ -29,6 +29,8 @@ def biio_prediction( input_layout: ImageLayout, model_id: str, ) -> np.ndarray: + assert isinstance(input_layout, str) + model = load_model_description(model_id) if isinstance(model, v0_4.ModelDescr): input_ids = [input_tensor.name for input_tensor in model.inputs] @@ -37,40 +39,57 @@ def biio_prediction( else: assert_never(model) + logger.info(f"Model expects these inputs: {input_ids}.") if len(input_ids) < 1: - logger.error("Model needs no input tensor.") + logger.error("Model needs no input tensor. 
PlantSeg does not support this yet.") if len(input_ids) > 1: - logger.warning("Model needs more than one input tensor. PlantSeg does not support this yet.") - tensor_id = input_ids[0] + logger.error("Model needs more than one input tensor. PlantSeg does not support this yet.") - logger.info(f"model expects these inputs: {input_ids}") - - assert isinstance(input_layout, str) + tensor_id = input_ids[0] + axes = model.inputs[0].axes # PlantSeg only supports one input tensor for now dims = tuple( AxisId('channel') if item.lower() == 'c' else AxisId(item.lower()) for item in input_layout ) # `AxisId` has to be "channel" not "c" members = { TensorId(tensor_id): Tensor(array=raw, dims=dims).transpose( - [AxisId(a) if isinstance(a, str) else a.id for a in model.inputs[0].axes] + [AxisId(a) if isinstance(a, str) else a.id for a in axes] ) } - input_block_shape = { - TensorId(tensor_id): { - # 'emotional-cricket' has: - # {'batch': None, 'channel': 1, 'z': 100, 'y': 128, 'x': 128} - # - # 'philosophical-panda' has: - # {'z': ParameterizedSize(min=1, step=1), - # 'channel': 2, - # 'y': ParameterizedSize(min=16, step=16), - # 'x': ParameterizedSize(min=16, step=16)} - AxisId(a) if isinstance(a, str) else a.id: a.size if a.size is not None else 1 - for a in model.inputs[0].axes + sample = Sample(members=members, stat={}, id="raw") + + for a in axes: + if isinstance(a, str): + raise ValueError(f"Model has a string axis: {a}, please report issue to PlantSeg developers.") + sizes_in_rdf = {a.id: a.size for a in axes} + assert 'x' in sizes_in_rdf, "Model does not have 'x' axis in input tensor." + size_to_check = sizes_in_rdf[AxisId('x')] + if isinstance(size_to_check, int): # e.g. 'emotional-cricket' + # 'emotional-cricket' has {'batch': None, 'channel': 1, 'z': 100, 'y': 128, 'x': 128} + input_block_shape = { + TensorId(tensor_id): { + a.id: a.size if isinstance(a.size, int) else 1 + for a in axes + if not isinstance(a, str) # for a.size/a.id type checking only + } } - } + sample_out = predict(model=model, inputs=sample, input_block_shape=input_block_shape) + elif isinstance(size_to_check, v0_5.ParameterizedSize): # e.g. 'philosophical-panda' + # 'philosophical-panda' has: + # {'z': ParameterizedSize(min=1, step=1), + # 'channel': 2, + # 'y': ParameterizedSize(min=16, step=16), + # 'x': ParameterizedSize(min=16, step=16)} + blocksize_parameter = { + (TensorId(tensor_id), a.id): ( + (96 - a.size.min) // a.size.step if isinstance(a.size, v0_5.ParameterizedSize) else 1 + ) + for a in axes + if not isinstance(a, str) # for a.size/a.id type checking only + } + sample_out = predict(model=model, inputs=sample, blocksize_parameter=blocksize_parameter) + else: + assert_never(size_to_check) - sample = Sample(members=members, stat={}, id="raw") - sample_out = predict(model=model, inputs=sample, input_block_shape=input_block_shape) assert isinstance(sample_out, Sample) if len(sample_out.members) != 1: logger.warning("Model has more than one output tensor. 
PlantSeg does not support this yet.") From cf0e69bf5d1a9d69b71c692b1b1a31b42ba8ae7f Mon Sep 17 00:00:00 2001 From: Qin Yu Date: Mon, 16 Dec 2024 22:37:10 +0100 Subject: [PATCH 19/32] feat: support arbitrary network output suck as Cellpose with broken types and returns --- plantseg/functionals/prediction/prediction.py | 12 +- plantseg/tasks/prediction_tasks.py | 2 +- test_biio.py | 17 - test_zoo.ipynb | 2554 ----------------- 4 files changed, 8 insertions(+), 2577 deletions(-) delete mode 100644 test_biio.py delete mode 100644 test_zoo.ipynb diff --git a/plantseg/functionals/prediction/prediction.py b/plantseg/functionals/prediction/prediction.py index f6296ac0..5b6e3377 100644 --- a/plantseg/functionals/prediction/prediction.py +++ b/plantseg/functionals/prediction/prediction.py @@ -93,11 +93,13 @@ def biio_prediction( assert isinstance(sample_out, Sample) if len(sample_out.members) != 1: logger.warning("Model has more than one output tensor. PlantSeg does not support this yet.") - key = list(sample_out.members.keys())[0] - pmaps = sample_out.members[key].data.to_numpy()[0] - assert pmaps.ndim == 4, f"Expected 4D CZXY prediction from `biio_prediction()`, got {pmaps.ndim}D" - - return pmaps + t = {i: o.transpose(['batch', 'channel', 'z', 'y', 'x']) for i, o in sample_out.members.items()} + pmaps = [] + for i, bczyx in t.items(): + for czyx in bczyx: + for zyx in czyx: + pmaps.append(zyx.data.to_numpy()) + return pmaps # FIXME: Wrong return type def unet_prediction( diff --git a/plantseg/tasks/prediction_tasks.py b/plantseg/tasks/prediction_tasks.py index 4f185615..7e9f6402 100644 --- a/plantseg/tasks/prediction_tasks.py +++ b/plantseg/tasks/prediction_tasks.py @@ -51,7 +51,7 @@ def unet_prediction_task( config_path=config_path, model_weights_path=model_weights_path, ) - assert pmaps.ndim == 4, f"Expected 4D CZXY prediction, got {pmaps.ndim}D" + # assert pmaps.ndim == 4, f"Expected 4D CZXY prediction, got {pmaps.ndim}D" new_images = [] diff --git a/test_biio.py b/test_biio.py deleted file mode 100644 index 1f8d2522..00000000 --- a/test_biio.py +++ /dev/null @@ -1,17 +0,0 @@ -import numpy as np -from bioimageio.core.prediction import predict -from bioimageio.core.sample import Sample -from bioimageio.core.tensor import Tensor -from bioimageio.spec.model.v0_5 import TensorId - -array = np.random.randint(0, 255, (2, 128, 128, 128), dtype=np.uint8) -dims = ('c', 'z', 'y', 'x') -sample = Sample(members={TensorId('a'): Tensor(array=array, dims=dims)}, stat={}, id='try') - -temp = predict( - # model='https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/philosophical-panda/0.0.11/files/rdf.yaml', - model='https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/emotional-cricket/1.1/files/rdf.yaml', - # model='/Users/qin/Downloads/efficient-chipmunk.yaml', - inputs=sample, - sample_id='sample', -) diff --git a/test_zoo.ipynb b/test_zoo.ipynb deleted file mode 100644 index 5cf42488..00000000 --- a/test_zoo.ipynb +++ /dev/null @@ -1,2554 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "INFO: P [MainThread] 2024-12-05 16:54:48,153 plantseg - Logger configured at initialisation. 
PlantSeg logger name: plantseg\n" - ] - } - ], - "source": [ - "from plantseg.core.zoo import model_zoo" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "PlantSeg version: 2.0.0a7\n", - "PyTorch version: 2.2.2\n" - ] - } - ], - "source": [ - "import torch\n", - "\n", - "from plantseg import __version__\n", - "\n", - "print(f\"PlantSeg version: {__version__.__version__}\")\n", - "print(f\"PyTorch version: {torch.__version__}\")" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "model_zoo.refresh_bioimageio_zoo_urls()" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
nameurlpathdescriptionresolutiondimensionalitymodalityrecommended_patch_sizeoutput_typedoiadded_byname_displayrdf_sourcesupported
id
affable-sharkNucleiSegmentationBoundaryModelNoneNoneNoneNoneNoneNoneNoneNoneNonebioimage.ioaffable-shark : NucleiSegmentationB...https://uk1s3.embassy.ebi.ac.uk/public-dataset...False
chatty-frogStarDist H&E Nuclei SegmentationNoneNoneNoneNoneNoneNoneNoneNoneNonebioimage.iochatty-frog : StarDist H&E Nuclei...https://uk1s3.embassy.ebi.ac.uk/public-dataset...False
hiding-tigerLiveCellSegmentationBoundaryModelNoneNoneNoneNoneNoneNoneNoneNoneNonebioimage.iohiding-tiger : LiveCellSegmentatio...https://uk1s3.embassy.ebi.ac.uk/public-dataset...False
impartial-shrimpNeuron Segmentation in EM (Membrane Prediction)NoneNoneNoneNoneNoneNoneNoneNoneNonebioimage.ioimpartial-shrimp : Neuron Segmentation...https://uk1s3.embassy.ebi.ac.uk/public-dataset...False
discreet-roosterPancreatic Phase Contrast Cell Segmentation (U...NoneNoneNoneNoneNoneNoneNoneNoneNonebioimage.iodiscreet-rooster : Pancreatic Phase Co...https://uk1s3.embassy.ebi.ac.uk/public-dataset...False
.............................................
stupendous-sheep(Empanada) 2D Instance Mitochondrial Segmentat...NoneNoneNoneNoneNoneNoneNoneNoneNonebioimage.iostupendous-sheep : (Empanada) 2D Insta...https://uk1s3.embassy.ebi.ac.uk/public-dataset...False
pioneering-goatUniFMIRProjectionOnFlyWingNoneNoneNoneNoneNoneNoneNoneNoneNonebioimage.iopioneering-goat : UniFMIRProjectionOn...https://uk1s3.embassy.ebi.ac.uk/public-dataset...False
greedy-sharkUniFMIRVolumetricReconstructionOnVCDNoneNoneNoneNoneNoneNoneNoneNoneNonebioimage.iogreedy-shark : UniFMIRVolumetricRe...https://uk1s3.embassy.ebi.ac.uk/public-dataset...False
lucky-foxUniFMIRIsotropicReconstructionOnLiverNoneNoneNoneNoneNoneNoneNoneNoneNonebioimage.iolucky-fox : UniFMIRIsotropicRec...https://uk1s3.embassy.ebi.ac.uk/public-dataset...False
frank-water-buffaloUniFMIRDenoiseOnPlanariaNoneNoneNoneNoneNoneNoneNoneNoneNonebioimage.iofrank-water-buffalo : UniFMIRDenoiseOnPla...https://uk1s3.embassy.ebi.ac.uk/public-dataset...False
\n", - "

68 rows × 14 columns

\n", - "
" - ], - "text/plain": [ - " name url \\\n", - "id \n", - "affable-shark NucleiSegmentationBoundaryModel None \n", - "chatty-frog StarDist H&E Nuclei Segmentation None \n", - "hiding-tiger LiveCellSegmentationBoundaryModel None \n", - "impartial-shrimp Neuron Segmentation in EM (Membrane Prediction) None \n", - "discreet-rooster Pancreatic Phase Contrast Cell Segmentation (U... None \n", - "... ... ... \n", - "stupendous-sheep (Empanada) 2D Instance Mitochondrial Segmentat... None \n", - "pioneering-goat UniFMIRProjectionOnFlyWing None \n", - "greedy-shark UniFMIRVolumetricReconstructionOnVCD None \n", - "lucky-fox UniFMIRIsotropicReconstructionOnLiver None \n", - "frank-water-buffalo UniFMIRDenoiseOnPlanaria None \n", - "\n", - " path description resolution dimensionality modality \\\n", - "id \n", - "affable-shark None None None None None \n", - "chatty-frog None None None None None \n", - "hiding-tiger None None None None None \n", - "impartial-shrimp None None None None None \n", - "discreet-rooster None None None None None \n", - "... ... ... ... ... ... \n", - "stupendous-sheep None None None None None \n", - "pioneering-goat None None None None None \n", - "greedy-shark None None None None None \n", - "lucky-fox None None None None None \n", - "frank-water-buffalo None None None None None \n", - "\n", - " recommended_patch_size output_type doi added_by \\\n", - "id \n", - "affable-shark None None None bioimage.io \n", - "chatty-frog None None None bioimage.io \n", - "hiding-tiger None None None bioimage.io \n", - "impartial-shrimp None None None bioimage.io \n", - "discreet-rooster None None None bioimage.io \n", - "... ... ... ... ... \n", - "stupendous-sheep None None None bioimage.io \n", - "pioneering-goat None None None bioimage.io \n", - "greedy-shark None None None bioimage.io \n", - "lucky-fox None None None bioimage.io \n", - "frank-water-buffalo None None None bioimage.io \n", - "\n", - " name_display \\\n", - "id \n", - "affable-shark affable-shark : NucleiSegmentationB... \n", - "chatty-frog chatty-frog : StarDist H&E Nuclei... \n", - "hiding-tiger hiding-tiger : LiveCellSegmentatio... \n", - "impartial-shrimp impartial-shrimp : Neuron Segmentation... \n", - "discreet-rooster discreet-rooster : Pancreatic Phase Co... \n", - "... ... \n", - "stupendous-sheep stupendous-sheep : (Empanada) 2D Insta... \n", - "pioneering-goat pioneering-goat : UniFMIRProjectionOn... \n", - "greedy-shark greedy-shark : UniFMIRVolumetricRe... \n", - "lucky-fox lucky-fox : UniFMIRIsotropicRec... \n", - "frank-water-buffalo frank-water-buffalo : UniFMIRDenoiseOnPla... \n", - "\n", - " rdf_source \\\n", - "id \n", - "affable-shark https://uk1s3.embassy.ebi.ac.uk/public-dataset... \n", - "chatty-frog https://uk1s3.embassy.ebi.ac.uk/public-dataset... \n", - "hiding-tiger https://uk1s3.embassy.ebi.ac.uk/public-dataset... \n", - "impartial-shrimp https://uk1s3.embassy.ebi.ac.uk/public-dataset... \n", - "discreet-rooster https://uk1s3.embassy.ebi.ac.uk/public-dataset... \n", - "... ... \n", - "stupendous-sheep https://uk1s3.embassy.ebi.ac.uk/public-dataset... \n", - "pioneering-goat https://uk1s3.embassy.ebi.ac.uk/public-dataset... \n", - "greedy-shark https://uk1s3.embassy.ebi.ac.uk/public-dataset... \n", - "lucky-fox https://uk1s3.embassy.ebi.ac.uk/public-dataset... \n", - "frank-water-buffalo https://uk1s3.embassy.ebi.ac.uk/public-dataset... 
\n", - "\n", - " supported \n", - "id \n", - "affable-shark False \n", - "chatty-frog False \n", - "hiding-tiger False \n", - "impartial-shrimp False \n", - "discreet-rooster False \n", - "... ... \n", - "stupendous-sheep False \n", - "pioneering-goat False \n", - "greedy-shark False \n", - "lucky-fox False \n", - "frank-water-buffalo False \n", - "\n", - "[68 rows x 14 columns]" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "model_zoo.models_bioimageio" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "['affable-shark',\n", - " 'affectionate-cow',\n", - " 'ambitious-ant',\n", - " 'ambitious-sloth',\n", - " 'amiable-crocodile',\n", - " 'charismatic-whale',\n", - " 'chatty-frog',\n", - " 'committed-turkey',\n", - " 'conscientious-seashell',\n", - " 'courteous-otter',\n", - " 'creative-panda',\n", - " 'dazzling-spider',\n", - " 'decisive-panda',\n", - " 'determined-chipmunk',\n", - " 'determined-hedgehog',\n", - " 'diplomatic-bug',\n", - " 'discreet-rooster',\n", - " 'dynamic-t-rex',\n", - " 'easy-going-sauropod',\n", - " 'efficient-chipmunk',\n", - " 'emotional-cricket',\n", - " 'faithful-chicken',\n", - " 'famous-fish',\n", - " 'fearless-crab',\n", - " 'frank-water-buffalo',\n", - " 'greedy-shark',\n", - " 'greedy-whale',\n", - " 'happy-elephant',\n", - " 'hiding-blowfish',\n", - " 'hiding-tiger',\n", - " 'humorous-crab',\n", - " 'humorous-fox',\n", - " 'humorous-owl',\n", - " 'idealistic-rat',\n", - " 'impartial-shark',\n", - " 'impartial-shrimp',\n", - " 'independent-shrimp',\n", - " 'joyful-deer',\n", - " 'kind-seashell',\n", - " 'laid-back-lobster',\n", - " 'loyal-parrot',\n", - " 'loyal-squid',\n", - " 'lucky-fox',\n", - " 'modest-octopus',\n", - " 'naked-microbe',\n", - " 'nice-peacock',\n", - " 'noisy-fish',\n", - " 'noisy-hedgehog',\n", - " 'noisy-ox',\n", - " 'non-judgemental-eagle',\n", - " 'organized-badger',\n", - " 'organized-cricket',\n", - " 'passionate-t-rex',\n", - " 'philosophical-panda',\n", - " 'pioneering-goat',\n", - " 'pioneering-rhino',\n", - " 'placid-llama',\n", - " 'polite-pig',\n", - " 'powerful-chipmunk',\n", - " 'powerful-fish',\n", - " 'resourceful-lizard',\n", - " 'shivering-raccoon',\n", - " 'straightforward-crocodile',\n", - " 'stupendous-sheep',\n", - " 'thoughtful-turtle',\n", - " 'wild-rhino',\n", - " 'wild-whale',\n", - " 'willing-hedgehog']" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# get all index\n", - "\n", - "sorted(model_zoo.models_bioimageio.index.to_list())" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "['efficient-chipmunk',\n", - " 'emotional-cricket',\n", - " 'loyal-squid',\n", - " 'noisy-fish',\n", - " 'passionate-t-rex',\n", - " 'pioneering-rhino',\n", - " 'powerful-fish',\n", - " 'thoughtful-turtle']" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# get all index where `supported` is True:\n", - "\n", - "sorted(model_zoo.models_bioimageio[model_zoo.models_bioimageio[\"supported\"]].index.to_list())" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "True" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "'efficient-chipmunk' in 
model_zoo.models_bioimageio.index" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "name PlantSeg Plant Nuclei 3D UNet\n", - "url None\n", - "path None\n", - "description None\n", - "resolution None\n", - "dimensionality None\n", - "modality None\n", - "recommended_patch_size None\n", - "output_type None\n", - "doi None\n", - "added_by bioimage.io\n", - "name_display efficient-chipmunk : PlantSeg Plant Nucl...\n", - "rdf_source https://uk1s3.embassy.ebi.ac.uk/public-dataset...\n", - "supported True\n", - "Name: efficient-chipmunk, dtype: object" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "model_zoo.models_bioimageio.loc['efficient-chipmunk']" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "Url('https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/efficient-chipmunk/1/files/rdf.yaml')" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "model_zoo.models_bioimageio.loc['efficient-chipmunk']['rdf_source']" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "Url('https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/emotional-cricket/1.1/files/rdf.yaml')" - ] - }, - "execution_count": 20, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "model_zoo.models_bioimageio.loc['emotional-cricket']['rdf_source']" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "id\n", - "affable-shark NucleiSegmentationBoundaryModel\n", - "chatty-frog StarDist H&E Nuclei Segmentation\n", - "hiding-tiger LiveCellSegmentationBoundaryModel\n", - "impartial-shrimp Neuron Segmentation in EM (Membrane Prediction)\n", - "discreet-rooster Pancreatic Phase Contrast Cell Segmentation (U...\n", - " ... 
\n", - "stupendous-sheep (Empanada) 2D Instance Mitochondrial Segmentat...\n", - "pioneering-goat UniFMIRProjectionOnFlyWing\n", - "greedy-shark UniFMIRVolumetricReconstructionOnVCD\n", - "lucky-fox UniFMIRIsotropicReconstructionOnLiver\n", - "frank-water-buffalo UniFMIRDenoiseOnPlanaria\n", - "Name: name, Length: 68, dtype: object" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "model_zoo.models_bioimageio['name']" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "pydantic_core._pydantic_core.Url" - ] - }, - "execution_count": 11, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "type(model_zoo.models_bioimageio.at['efficient-chipmunk', 'rdf_source'])" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "pydantic_core._pydantic_core.Url" - ] - }, - "execution_count": 12, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "type(model_zoo.models_bioimageio.loc['efficient-chipmunk']['rdf_source'])" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "from bioimageio.core.prediction import predict\n", - "from bioimageio.core.sample import Sample\n", - "from bioimageio.core.tensor import Tensor\n", - "from bioimageio.spec.model.v0_5 import TensorId" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "array = np.random.randint(0, 255, (128, 128, 128), dtype=np.uint8)\n", - "dims = ('z', 'y', 'x')\n", - "sample = Sample(members={TensorId('a'): Tensor(array, dims)}, stat={}, id='try')\n", - "# sample.members[TensorId('a')].data" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "Url('https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/philosophical-panda/0.0.11/files/rdf.yaml')" - ] - }, - "execution_count": 19, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "model_zoo.models_bioimageio.at['philosophical-panda', 'rdf_source']" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Downloading data from 'https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/philosophical-panda/0.0.11/files/rdf.yaml' to file '/Users/qin/Library/Caches/bioimageio/520a69782c7dafb6478e43ebcc4679b0-rdf.yaml'.\n", - "100%|█████████████████████████████████████| 13.5k/13.5k [00:00<00:00, 35.7MB/s]\n", - "SHA256 hash of downloaded file: bbad75237ecf4f9d9f6259b13b97fc01b5cbeb4e3ea672e72826608d68197a32\n", - "Use this value as the 'known_hash' argument of 'pooch.retrieve' to ensure that the file hasn't changed if it is downloaded again in the future.\n", - "Downloading data from 'https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/philosophical-panda/0.0.11/files/README.md' to file '/Users/qin/Library/Caches/bioimageio/42620dae3e3850cefbf0475c7cf590dd-README.md'.\n", - "100%|██████████████████████████████████████████| 431/431 [00:00<00:00, 658kB/s]\n", - "SHA256 hash of downloaded file: fc6e1292ca309bedaca504260cecc9a7bc9f26e9328eb7a051f82a2ceec475e3\n", - "Use this value as the 'known_hash' argument of 'pooch.retrieve' to ensure that the file hasn't changed if it is 
downloaded again in the future.\n", - "Downloading data from 'https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/philosophical-panda/0.0.11/files/test_input.npy' to file '/Users/qin/Library/Caches/bioimageio/101853864f8c8e986b2819c9ac44d0f9-test_input.npy'.\n", - "100%|█████████████████████████████████████| 5.53M/5.53M [00:00<00:00, 8.91GB/s]\n", - "computing SHA256 of 101853864f8c8e986b2819c9ac44d0f9-test_input.npy (result: 6810255f5b5260fe39153f2192bedf30d9899ec4e770976b7813116c467579f0): 100%|██████████| 5529728/5529728 [00:00<00:00, 1220895945.11it/s]\n", - "Downloading data from 'https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/philosophical-panda/0.0.11/files/test_output.npy' to file '/Users/qin/Library/Caches/bioimageio/57b964a123db3ff8400bcce3ef902b18-test_output.npy'.\n", - "100%|█████████████████████████████████████| 8.29M/8.29M [00:00<00:00, 11.9GB/s]\n", - "computing SHA256 of 57b964a123db3ff8400bcce3ef902b18-test_output.npy (result: d802e3024da80bff93a9ec50fbe50b9c3946534aab1b60b911511111a8e2dbca): 100%|██████████| 8294528/8294528 [00:00<00:00, 1690711569.64it/s]\n", - "Downloading data from 'https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/philosophical-panda/0.0.11/files/test_style.npy' to file '/Users/qin/Library/Caches/bioimageio/0b712fa8abf8707021a71747e726bf7c-test_style.npy'.\n", - "100%|█████████████████████████████████████| 76.9k/76.9k [00:00<00:00, 71.6MB/s]\n", - "computing SHA256 of 0b712fa8abf8707021a71747e726bf7c-test_style.npy (result: ab464b406f9050561b40f7d76700ab5edf3aca97e31fe9a6069a51aeeca8bc81): 100%|██████████| 76928/76928 [00:00<00:00, 106734838.94it/s]\n", - "Downloading data from 'https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/philosophical-panda/0.0.11/files/test_downsampled_0.npy' to file '/Users/qin/Library/Caches/bioimageio/d0abd68ef3844b0d6fbed811e0fed878-test_downsampled_0.npy'.\n", - "100%|█████████████████████████████████████| 88.5M/88.5M [00:00<00:00, 82.3GB/s]\n", - "computing SHA256 of d0abd68ef3844b0d6fbed811e0fed878-test_downsampled_0.npy (result: 67df53fb440e94dbb9c8e4003dcbde158646a7975c4878cacdd251e1fcfb4225): 100%|██████████| 88473728/88473728 [00:00<00:00, 2235146402.88it/s]\n", - "Downloading data from 'https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/philosophical-panda/0.0.11/files/test_downsampled_1.npy' to file '/Users/qin/Library/Caches/bioimageio/3765cac1d92a49daf0d6ec949919aeb1-test_downsampled_1.npy'.\n", - "100%|█████████████████████████████████████| 44.2M/44.2M [00:00<00:00, 52.2GB/s]\n", - "computing SHA256 of 3765cac1d92a49daf0d6ec949919aeb1-test_downsampled_1.npy (result: cb4addbd763d96731ebd18ed001b87ab7195ec9198f01a753a363a06c27bfb1c): 100%|██████████| 44236928/44236928 [00:00<00:00, 2150700977.83it/s]\n", - "Downloading data from 'https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/philosophical-panda/0.0.11/files/test_downsampled_2.npy' to file '/Users/qin/Library/Caches/bioimageio/7251078a2afa8713384a3103878dd09d-test_downsampled_2.npy'.\n", - "100%|█████████████████████████████████████| 22.1M/22.1M [00:00<00:00, 24.9GB/s]\n", - "computing SHA256 of 7251078a2afa8713384a3103878dd09d-test_downsampled_2.npy (result: 9c0225b94d84fcc3adfb9a73eef1303d6adb318b57a5a801e0e2e1638b458e72): 100%|██████████| 22118528/22118528 [00:00<00:00, 2035139420.08it/s]\n", - "Downloading data from 'https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/philosophical-panda/0.0.11/files/test_downsampled_3.npy' to file 
'/Users/qin/Library/Caches/bioimageio/e434cdc3ea3e7ecfb752cdc001617875-test_downsampled_3.npy'.\n", - "100%|█████████████████████████████████████| 11.1M/11.1M [00:00<00:00, 15.7GB/s]\n", - "computing SHA256 of e434cdc3ea3e7ecfb752cdc001617875-test_downsampled_3.npy (result: 1ea789ff37d47197c847b585799f7d063e7592b0c5e9c3094fd0e3ac209b7fc2): 100%|██████████| 11059328/11059328 [00:00<00:00, 1367718816.68it/s]\n", - "Downloading data from 'https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/philosophical-panda/0.0.11/files/cpnet_wrapper.py' to file '/Users/qin/Library/Caches/bioimageio/00bd170d6c9de6a391d6869f59058847-cpnet_wrapper.py'.\n", - "100%|█████████████████████████████████████| 11.1k/11.1k [00:00<00:00, 18.9MB/s]\n", - "computing SHA256 of 00bd170d6c9de6a391d6869f59058847-cpnet_wrapper.py (result: b8b947cdd0ea8f5b98bd7be5f12f38bb1ea1ebe0b455c62d9a6389cd21d134bf): 100%|██████████| 11053/11053 [00:00<00:00, 15037185.25it/s]\n", - "Downloading data from 'https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/philosophical-panda/0.0.11/files/cp_state_dict_1135_gold.pth' to file '/Users/qin/Library/Caches/bioimageio/8dbb20d5a3cb3a3dfdb5101a671861ce-cp_state_dict_1135_gold.pth'.\n", - "100%|█████████████████████████████████████| 26.6M/26.6M [00:00<00:00, 43.3GB/s]\n", - "computing SHA256 of 8dbb20d5a3cb3a3dfdb5101a671861ce-cp_state_dict_1135_gold.pth (result: 26c277f3b8f6ca5aab30b4b0a832601aea60183cbed1c2333576f4135a643eb2): 100%|██████████| 26556687/26556687 [00:00<00:00, 2103662363.99it/s]\n", - "Downloading data from 'https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/philosophical-panda/0.0.11/files/cp_traced_1135_gold.pt' to file '/Users/qin/Library/Caches/bioimageio/17fee110c39ccad7c3cb36d00d2fdd2c-cp_traced_1135_gold.pt'.\n", - "100%|█████████████████████████████████████| 26.8M/26.8M [00:00<00:00, 26.3GB/s]\n", - "computing SHA256 of 17fee110c39ccad7c3cb36d00d2fdd2c-cp_traced_1135_gold.pt (result: f61bae146ab522902350eadda1d509ac1037726fe6d7fb63f6a8a314021d63e7): 100%|██████████| 26812339/26812339 [00:00<00:00, 2032773000.69it/s]\n" - ] - }, - { - "ename": "AttributeError", - "evalue": "'NoneType' object has no attribute 'dim'", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[15], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m temp \u001b[38;5;241m=\u001b[39m \u001b[43mpredict\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 2\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;66;43;03m# model=model_zoo.models_bioimageio.at['emotional-cricket', 'rdf_source'],\u001b[39;49;00m\n\u001b[1;32m 3\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;66;43;03m# model=model_zoo.models_bioimageio.at['efficient-chipmunk', 'rdf_source'],\u001b[39;49;00m\n\u001b[1;32m 4\u001b[0m \u001b[43m \u001b[49m\u001b[43mmodel\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmodel_zoo\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmodels_bioimageio\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mat\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mphilosophical-panda\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mrdf_source\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 5\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;66;43;03m# 
model='/Users/qin/Downloads/rdf.yaml',\u001b[39;49;00m\n\u001b[1;32m 6\u001b[0m \u001b[43m \u001b[49m\u001b[43minputs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43msample\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 7\u001b[0m \u001b[43m \u001b[49m\u001b[43msample_id\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43msample\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 8\u001b[0m \u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/bioimageio/core/prediction.py:114\u001b[0m, in \u001b[0;36mpredict\u001b[0;34m(model, inputs, sample_id, blocksize_parameter, input_block_shape, skip_preprocessing, skip_postprocessing, save_output_path)\u001b[0m\n\u001b[1;32m 107\u001b[0m output \u001b[38;5;241m=\u001b[39m pp\u001b[38;5;241m.\u001b[39mpredict_sample_with_blocking(\n\u001b[1;32m 108\u001b[0m sample,\n\u001b[1;32m 109\u001b[0m skip_preprocessing\u001b[38;5;241m=\u001b[39mskip_preprocessing,\n\u001b[1;32m 110\u001b[0m skip_postprocessing\u001b[38;5;241m=\u001b[39mskip_postprocessing,\n\u001b[1;32m 111\u001b[0m ns\u001b[38;5;241m=\u001b[39mblocksize_parameter,\n\u001b[1;32m 112\u001b[0m )\n\u001b[1;32m 113\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m--> 114\u001b[0m output \u001b[38;5;241m=\u001b[39m \u001b[43mpp\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mpredict_sample_without_blocking\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 115\u001b[0m \u001b[43m \u001b[49m\u001b[43msample\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 116\u001b[0m \u001b[43m \u001b[49m\u001b[43mskip_preprocessing\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mskip_preprocessing\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 117\u001b[0m \u001b[43m \u001b[49m\u001b[43mskip_postprocessing\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mskip_postprocessing\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 118\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 119\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m save_output_path:\n\u001b[1;32m 120\u001b[0m save_sample(save_output_path, output)\n", - "File \u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/bioimageio/core/_prediction_pipeline.py:160\u001b[0m, in \u001b[0;36mPredictionPipeline.predict_sample_without_blocking\u001b[0;34m(self, sample, skip_preprocessing, skip_postprocessing)\u001b[0m\n\u001b[1;32m 152\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m skip_preprocessing:\n\u001b[1;32m 153\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mapply_preprocessing(sample)\n\u001b[1;32m 155\u001b[0m output \u001b[38;5;241m=\u001b[39m Sample(\n\u001b[1;32m 156\u001b[0m members\u001b[38;5;241m=\u001b[39m{\n\u001b[1;32m 157\u001b[0m out_id: out\n\u001b[1;32m 158\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m out_id, out \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mzip\u001b[39m(\n\u001b[1;32m 159\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_output_ids,\n\u001b[0;32m--> 160\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_adapter\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mforward\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 161\u001b[0m \u001b[43m 
\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43msample\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmembers\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget\u001b[49m\u001b[43m(\u001b[49m\u001b[43min_id\u001b[49m\u001b[43m)\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mfor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43min_id\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01min\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_input_ids\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 162\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m,\n\u001b[1;32m 163\u001b[0m )\n\u001b[1;32m 164\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m out \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 165\u001b[0m },\n\u001b[1;32m 166\u001b[0m stat\u001b[38;5;241m=\u001b[39msample\u001b[38;5;241m.\u001b[39mstat,\n\u001b[1;32m 167\u001b[0m \u001b[38;5;28mid\u001b[39m\u001b[38;5;241m=\u001b[39msample\u001b[38;5;241m.\u001b[39mid,\n\u001b[1;32m 168\u001b[0m )\n\u001b[1;32m 169\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m skip_postprocessing:\n\u001b[1;32m 170\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mapply_postprocessing(output)\n", - "File \u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/bioimageio/core/model_adapters/_pytorch_model_adapter.py:71\u001b[0m, in \u001b[0;36mPytorchModelAdapter.forward\u001b[0;34m(self, *input_tensors)\u001b[0m\n\u001b[1;32m 60\u001b[0m tensors \u001b[38;5;241m=\u001b[39m [\n\u001b[1;32m 61\u001b[0m (\n\u001b[1;32m 62\u001b[0m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 68\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m t \u001b[38;5;129;01min\u001b[39;00m tensors\n\u001b[1;32m 69\u001b[0m ]\n\u001b[1;32m 70\u001b[0m result: Union[Tuple[Any, \u001b[38;5;241m.\u001b[39m\u001b[38;5;241m.\u001b[39m\u001b[38;5;241m.\u001b[39m], List[Any], Any]\n\u001b[0;32m---> 71\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_network\u001b[49m\u001b[43m(\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# pyright: ignore[reportUnknownVariableType]\u001b[39;49;00m\n\u001b[1;32m 72\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mtensors\u001b[49m\n\u001b[1;32m 73\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 74\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(result, (\u001b[38;5;28mtuple\u001b[39m, \u001b[38;5;28mlist\u001b[39m)):\n\u001b[1;32m 75\u001b[0m result \u001b[38;5;241m=\u001b[39m [result]\n", - "File \u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/torch/nn/modules/module.py:1511\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1509\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1510\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1511\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m 
\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/torch/nn/modules/module.py:1520\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1515\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1516\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1517\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1518\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1519\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1520\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1522\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 1523\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n", - "File \u001b[0;32m~/Library/Caches/bioimageio/00bd170d6c9de6a391d6869f59058847-cpnet_wrapper.py:283\u001b[0m, in \u001b[0;36mCPnetBioImageIO.forward\u001b[0;34m(self, x)\u001b[0m\n\u001b[1;32m 273\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, x):\n\u001b[1;32m 274\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m 275\u001b[0m \u001b[38;5;124;03m Perform a forward pass of the CPnet model and return unpacked tensors.\u001b[39;00m\n\u001b[1;32m 276\u001b[0m \n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 281\u001b[0m \u001b[38;5;124;03m tuple: A tuple containing the output tensor, style tensor, and downsampled tensors.\u001b[39;00m\n\u001b[1;32m 282\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[0;32m--> 283\u001b[0m output_tensor, style_tensor, downsampled_tensors \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43msuper\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mforward\u001b[49m\u001b[43m(\u001b[49m\u001b[43mx\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 284\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m output_tensor, style_tensor, \u001b[38;5;241m*\u001b[39mdownsampled_tensors\n", - "File \u001b[0;32m~/Library/Caches/bioimageio/00bd170d6c9de6a391d6869f59058847-cpnet_wrapper.py:207\u001b[0m, in \u001b[0;36mCPnet.forward\u001b[0;34m(self, data)\u001b[0m\n\u001b[1;32m 205\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m 
\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mmkldnn:\n\u001b[1;32m 206\u001b[0m data \u001b[38;5;241m=\u001b[39m data\u001b[38;5;241m.\u001b[39mto_mkldnn()\n\u001b[0;32m--> 207\u001b[0m T0 \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdownsample\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdata\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 208\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mmkldnn:\n\u001b[1;32m 209\u001b[0m style \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mmake_style(T0[\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m]\u001b[38;5;241m.\u001b[39mto_dense())\n", - "File \u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/torch/nn/modules/module.py:1511\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1509\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1510\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1511\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/torch/nn/modules/module.py:1520\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1515\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1516\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1517\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1518\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1519\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1520\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1522\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 1523\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n", - "File \u001b[0;32m~/Library/Caches/bioimageio/00bd170d6c9de6a391d6869f59058847-cpnet_wrapper.py:60\u001b[0m, in 
\u001b[0;36mdownsample.forward\u001b[0;34m(self, x)\u001b[0m\n\u001b[1;32m 58\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 59\u001b[0m y \u001b[38;5;241m=\u001b[39m x\n\u001b[0;32m---> 60\u001b[0m xd\u001b[38;5;241m.\u001b[39mappend(\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdown\u001b[49m\u001b[43m[\u001b[49m\u001b[43mn\u001b[49m\u001b[43m]\u001b[49m\u001b[43m(\u001b[49m\u001b[43my\u001b[49m\u001b[43m)\u001b[49m)\n\u001b[1;32m 61\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m xd\n", - "File \u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/torch/nn/modules/module.py:1511\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1509\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1510\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1511\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/torch/nn/modules/module.py:1520\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1515\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1516\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1517\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1518\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1519\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1520\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1522\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 1523\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n", - "File \u001b[0;32m~/Library/Caches/bioimageio/00bd170d6c9de6a391d6869f59058847-cpnet_wrapper.py:37\u001b[0m, in \u001b[0;36mresdown.forward\u001b[0;34m(self, x)\u001b[0m\n\u001b[1;32m 36\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, x):\n\u001b[0;32m---> 
37\u001b[0m x \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mproj\u001b[49m\u001b[43m(\u001b[49m\u001b[43mx\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;241m+\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mconv[\u001b[38;5;241m1\u001b[39m](\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mconv[\u001b[38;5;241m0\u001b[39m](x))\n\u001b[1;32m 38\u001b[0m x \u001b[38;5;241m=\u001b[39m x \u001b[38;5;241m+\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mconv[\u001b[38;5;241m3\u001b[39m](\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mconv[\u001b[38;5;241m2\u001b[39m](x))\n\u001b[1;32m 39\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m x\n", - "File \u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/torch/nn/modules/module.py:1511\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1509\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1510\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1511\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/torch/nn/modules/module.py:1520\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1515\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1516\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1517\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1518\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1519\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1520\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1522\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 1523\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n", - "File 
\u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/torch/nn/modules/container.py:217\u001b[0m, in \u001b[0;36mSequential.forward\u001b[0;34m(self, input)\u001b[0m\n\u001b[1;32m 215\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;28minput\u001b[39m):\n\u001b[1;32m 216\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m module \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m:\n\u001b[0;32m--> 217\u001b[0m \u001b[38;5;28minput\u001b[39m \u001b[38;5;241m=\u001b[39m \u001b[43mmodule\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[1;32m 218\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28minput\u001b[39m\n", - "File \u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/torch/nn/modules/module.py:1511\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1509\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1510\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1511\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/torch/nn/modules/module.py:1520\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1515\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1516\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1517\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1518\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1519\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1520\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1522\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 1523\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n", - "File 
\u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/torch/nn/modules/batchnorm.py:142\u001b[0m, in \u001b[0;36m_BatchNorm.forward\u001b[0;34m(self, input)\u001b[0m\n\u001b[1;32m 141\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;28minput\u001b[39m: Tensor) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Tensor:\n\u001b[0;32m--> 142\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_check_input_dim\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[1;32m 144\u001b[0m \u001b[38;5;66;03m# exponential_average_factor is set to self.momentum\u001b[39;00m\n\u001b[1;32m 145\u001b[0m \u001b[38;5;66;03m# (when it is available) only so that it gets updated\u001b[39;00m\n\u001b[1;32m 146\u001b[0m \u001b[38;5;66;03m# in ONNX graph when this node is exported to ONNX.\u001b[39;00m\n\u001b[1;32m 147\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mmomentum \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n", - "File \u001b[0;32m~/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/torch/nn/modules/batchnorm.py:419\u001b[0m, in \u001b[0;36mBatchNorm2d._check_input_dim\u001b[0;34m(self, input)\u001b[0m\n\u001b[1;32m 418\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_check_input_dim\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;28minput\u001b[39m):\n\u001b[0;32m--> 419\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28;43minput\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdim\u001b[49m() \u001b[38;5;241m!=\u001b[39m \u001b[38;5;241m4\u001b[39m:\n\u001b[1;32m 420\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mexpected 4D input (got \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28minput\u001b[39m\u001b[38;5;241m.\u001b[39mdim()\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124mD input)\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n", - "\u001b[0;31mAttributeError\u001b[0m: 'NoneType' object has no attribute 'dim'" - ] - } - ], - "source": [ - "temp = predict(\n", - " # model=model_zoo.models_bioimageio.at['emotional-cricket', 'rdf_source'],\n", - " # model=model_zoo.models_bioimageio.at['efficient-chipmunk', 'rdf_source'],\n", - " model=model_zoo.models_bioimageio.at['philosophical-panda', 'rdf_source'],\n", - " # model='/Users/qin/Downloads/rdf.yaml',\n", - " inputs=sample,\n", - " sample_id='sample',\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "((1, 32, 64, 64), (64, 64, 32, 1))" - ] - }, - "execution_count": 23, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "np.random.rand(1, 32, 64, 64).shape, np.random.rand(64, 64, 32, 1).shape" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "from plantseg.functionals.dataprocessing.dataprocessing import ImageLayout" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "typing.Literal['ZYX', 'YX', 'CZYX', 'CYX']" - ] - }, - "execution_count": 2, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "ImageLayout" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - 
"metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "('Z', 'Y', 'X')" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "tuple('ZYX')" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "INFO: P [MainThread] 2024-12-14 01:09:58,630 plantseg - Logger configured at initialisation. PlantSeg logger name: plantseg\n" - ] - } - ], - "source": [ - "from pathlib import Path\n", - "from plantseg.functionals.prediction.prediction import biio_prediction\n", - "from plantseg.io import smart_load" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "(453, 800, 800)" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "raw = smart_load(\n", - " Path('/Users/qin/Documents/Work/Side_rAP2_16LDs_SAM4_nuclei_Z0.400_X0.291_Y0.291_Sz453_Sx800_Sy800.tif')\n", - ")\n", - "raw.shape" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "(128, 128, 128)" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "raw = smart_load(\n", - " Path('/Users/qin/Documents/Work/Side_rAP2_16LDs_SAM4_nuclei_Z0.400_X0.291_Y0.291_Sz453_Sx800_Sy800.tif')\n", - ")[200:200+128, 300:300+128, 300:300+128]\n", - "raw.shape" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[32m2024-12-14 00:35:41.719\u001b[0m | \u001b[1mINFO \u001b[0m | \u001b[36mbioimageio.spec._internal.io_utils\u001b[0m:\u001b[36mopen_bioimageio_yaml\u001b[0m:\u001b[36m131\u001b[0m - \u001b[1mloading emotional-cricket from https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/emotional-cricket/1.1/files/rdf.yaml\u001b[0m\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "INFO: P [MainThread] 2024-12-14 00:35:41,809 plantseg.functionals.prediction.prediction - model expects these inputs: ['raw']\n" - ] - } - ], - "source": [ - "out = biio_prediction(\n", - " raw=raw,\n", - " input_layout='ZYX',\n", - " # model_id='efficient-chipmunk',\n", - " model_id='emotional-cricket',\n", - " # model_id='/Users/qin/Downloads/efficient-chipmunk.yaml',\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": 32, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "True" - ] - }, - "execution_count": 32, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "len(out.members) == 1" - ] - }, - { - "cell_type": "code", - "execution_count": 28, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "['output0']" - ] - }, - "execution_count": 28, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "list(out.members)" - ] - }, - { - "cell_type": "code", - "execution_count": 31, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[array([[[[[1.23823965e-02, 3.71413166e-03, 2.64202990e-03, ...,\n", - " 2.41754763e-03, 4.86477558e-03, 2.36471929e-02],\n", - " [2.31429259e-03, 7.02380203e-04, 4.09954780e-04, ...,\n", - " 2.52719648e-04, 7.57241389e-04, 5.46071166e-03],\n", - " [1.33902160e-03, 3.50114511e-04, 2.14061569e-04, ...,\n", - " 7.40970063e-05, 2.46272044e-04, 2.46915710e-03],\n", - " ...,\n", 
- " [8.08374316e-04, 7.53455024e-05, 2.44885123e-05, ...,\n", - " 8.84886977e-05, 3.21889500e-04, 2.94925272e-03],\n", - " [1.35905389e-03, 1.61183911e-04, 6.91879104e-05, ...,\n", - " 2.89193413e-04, 6.93224138e-04, 4.70515573e-03],\n", - " [9.65875201e-03, 1.88575720e-03, 1.10870227e-03, ...,\n", - " 2.58681760e-03, 3.97690758e-03, 1.58278439e-02]],\n", - " \n", - " [[3.34995706e-03, 7.49400002e-04, 4.44106583e-04, ...,\n", - " 4.23803140e-04, 1.16107799e-03, 7.32442131e-03],\n", - " [4.63154254e-04, 1.31924564e-04, 5.75331833e-05, ...,\n", - " 2.48063916e-05, 1.31788765e-04, 1.28855614e-03],\n", - " [1.84829667e-04, 4.22494304e-05, 1.93375345e-05, ...,\n", - " 3.12912471e-06, 2.20445181e-05, 3.40537110e-04],\n", - " ...,\n", - " [1.03433682e-04, 4.99963107e-06, 7.01059491e-07, ...,\n", - " 5.13319992e-06, 4.15788127e-05, 5.11675724e-04],\n", - " [2.40331719e-04, 2.02179817e-05, 4.81432517e-06, ...,\n", - " 2.36316791e-05, 1.03607512e-04, 9.28838330e-04],\n", - " [2.54508085e-03, 3.50280927e-04, 1.46785387e-04, ...,\n", - " 3.32942145e-04, 8.13076796e-04, 4.12234711e-03]],\n", - " \n", - " [[1.35694118e-03, 1.68786763e-04, 7.48198290e-05, ...,\n", - " 2.81019224e-04, 7.46771577e-04, 5.51791256e-03],\n", - " [1.08341432e-04, 1.00760262e-05, 2.75844582e-06, ...,\n", - " 8.99239421e-06, 5.42080925e-05, 7.20382726e-04],\n", - " [3.48077228e-05, 2.02582328e-06, 5.01538182e-07, ...,\n", - " 5.80834751e-07, 4.99987436e-06, 1.27662483e-04],\n", - " ...,\n", - " [1.09947527e-04, 5.69427766e-06, 8.12078270e-07, ...,\n", - " 1.42909403e-06, 1.42246154e-05, 2.88977870e-04],\n", - " [2.98362691e-04, 3.16787191e-05, 7.62458285e-06, ...,\n", - " 5.10403333e-06, 3.06438706e-05, 4.58879425e-04],\n", - " [2.99305934e-03, 5.03332238e-04, 2.14670028e-04, ...,\n", - " 1.38354051e-04, 4.05884377e-04, 2.70675565e-03]],\n", - " \n", - " ...,\n", - " \n", - " [[3.29867308e-03, 4.56455105e-04, 1.34146132e-04, ...,\n", - " 9.69568064e-05, 3.37075238e-04, 2.77165999e-03],\n", - " [4.24169732e-04, 3.51630406e-05, 5.06967717e-06, ...,\n", - " 5.14997964e-06, 3.60815611e-05, 4.70222265e-04],\n", - " [9.61356855e-05, 5.18314391e-06, 7.80053995e-07, ...,\n", - " 2.31303488e-06, 1.90853134e-05, 3.29483621e-04],\n", - " ...,\n", - " [8.78510036e-06, 3.06934055e-07, 1.15704395e-07, ...,\n", - " 4.02947308e-07, 3.91793219e-06, 1.21722005e-04],\n", - " [3.12283155e-05, 1.88902447e-06, 9.24211236e-07, ...,\n", - " 1.67769019e-06, 1.39318608e-05, 3.06272268e-04],\n", - " [9.07270878e-04, 9.94062648e-05, 5.61416928e-05, ...,\n", - " 5.73289071e-05, 2.48869706e-04, 2.29152758e-03]],\n", - " \n", - " [[5.91580058e-03, 1.10559561e-03, 3.32048250e-04, ...,\n", - " 2.32103208e-04, 6.34685624e-04, 4.07362822e-03],\n", - " [9.84633924e-04, 1.30795612e-04, 2.20851525e-05, ...,\n", - " 1.67520193e-05, 1.00321871e-04, 8.62589630e-04],\n", - " [2.57799606e-04, 2.33190221e-05, 3.75139280e-06, ...,\n", - " 7.50424260e-06, 5.11594008e-05, 5.78730425e-04],\n", - " ...,\n", - " [1.69782816e-05, 6.66840947e-07, 1.38846460e-07, ...,\n", - " 1.80018546e-07, 2.14967281e-06, 9.65387953e-05],\n", - " [5.67699681e-05, 3.36161565e-06, 1.02050706e-06, ...,\n", - " 1.21392122e-06, 1.14076483e-05, 2.88846204e-04],\n", - " [1.40678498e-03, 1.51812113e-04, 6.07411748e-05, ...,\n", - " 5.12399092e-05, 2.33667044e-04, 2.21933913e-03]],\n", - " \n", - " [[2.29063556e-02, 6.30153902e-03, 2.76722992e-03, ...,\n", - " 2.20514974e-03, 4.18286538e-03, 1.75831020e-02],\n", - " [4.68514580e-03, 7.92992418e-04, 3.06714675e-04, ...,\n", - " 2.61121342e-04, 
7.31239910e-04, 4.39845258e-03],\n", - " [1.91156555e-03, 2.52487953e-04, 1.00966608e-04, ...,\n", - " 1.51911445e-04, 4.64357348e-04, 3.32914991e-03],\n", - " ...,\n", - " [7.02249992e-04, 5.80488013e-05, 1.94775675e-05, ...,\n", - " 8.09186895e-06, 4.56816706e-05, 9.44769825e-04],\n", - " [1.51418359e-03, 1.61546574e-04, 7.56840236e-05, ...,\n", - " 3.58892830e-05, 1.60053183e-04, 2.05396442e-03],\n", - " [1.25697535e-02, 2.19693198e-03, 1.26698730e-03, ...,\n", - " 6.89297158e-04, 1.90667005e-03, 1.04174931e-02]]]]],\n", - " dtype=float32)]" - ] - }, - "execution_count": 31, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "[tensor.data.to_numpy() for tensor in out.members.values()]" - ] - }, - { - "cell_type": "code", - "execution_count": 24, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([[[[[1.23823965e-02, 3.71413166e-03, 2.64202990e-03, ...,\n", - " 2.41754763e-03, 4.86477558e-03, 2.36471929e-02],\n", - " [2.31429259e-03, 7.02380203e-04, 4.09954780e-04, ...,\n", - " 2.52719648e-04, 7.57241389e-04, 5.46071166e-03],\n", - " [1.33902160e-03, 3.50114511e-04, 2.14061569e-04, ...,\n", - " 7.40970063e-05, 2.46272044e-04, 2.46915710e-03],\n", - " ...,\n", - " [8.08374316e-04, 7.53455024e-05, 2.44885123e-05, ...,\n", - " 8.84886977e-05, 3.21889500e-04, 2.94925272e-03],\n", - " [1.35905389e-03, 1.61183911e-04, 6.91879104e-05, ...,\n", - " 2.89193413e-04, 6.93224138e-04, 4.70515573e-03],\n", - " [9.65875201e-03, 1.88575720e-03, 1.10870227e-03, ...,\n", - " 2.58681760e-03, 3.97690758e-03, 1.58278439e-02]],\n", - "\n", - " [[3.34995706e-03, 7.49400002e-04, 4.44106583e-04, ...,\n", - " 4.23803140e-04, 1.16107799e-03, 7.32442131e-03],\n", - " [4.63154254e-04, 1.31924564e-04, 5.75331833e-05, ...,\n", - " 2.48063916e-05, 1.31788765e-04, 1.28855614e-03],\n", - " [1.84829667e-04, 4.22494304e-05, 1.93375345e-05, ...,\n", - " 3.12912471e-06, 2.20445181e-05, 3.40537110e-04],\n", - " ...,\n", - " [1.03433682e-04, 4.99963107e-06, 7.01059491e-07, ...,\n", - " 5.13319992e-06, 4.15788127e-05, 5.11675724e-04],\n", - " [2.40331719e-04, 2.02179817e-05, 4.81432517e-06, ...,\n", - " 2.36316791e-05, 1.03607512e-04, 9.28838330e-04],\n", - " [2.54508085e-03, 3.50280927e-04, 1.46785387e-04, ...,\n", - " 3.32942145e-04, 8.13076796e-04, 4.12234711e-03]],\n", - "\n", - " [[1.35694118e-03, 1.68786763e-04, 7.48198290e-05, ...,\n", - " 2.81019224e-04, 7.46771577e-04, 5.51791256e-03],\n", - " [1.08341432e-04, 1.00760262e-05, 2.75844582e-06, ...,\n", - " 8.99239421e-06, 5.42080925e-05, 7.20382726e-04],\n", - " [3.48077228e-05, 2.02582328e-06, 5.01538182e-07, ...,\n", - " 5.80834751e-07, 4.99987436e-06, 1.27662483e-04],\n", - " ...,\n", - " [1.09947527e-04, 5.69427766e-06, 8.12078270e-07, ...,\n", - " 1.42909403e-06, 1.42246154e-05, 2.88977870e-04],\n", - " [2.98362691e-04, 3.16787191e-05, 7.62458285e-06, ...,\n", - " 5.10403333e-06, 3.06438706e-05, 4.58879425e-04],\n", - " [2.99305934e-03, 5.03332238e-04, 2.14670028e-04, ...,\n", - " 1.38354051e-04, 4.05884377e-04, 2.70675565e-03]],\n", - "\n", - " ...,\n", - "\n", - " [[3.29867308e-03, 4.56455105e-04, 1.34146132e-04, ...,\n", - " 9.69568064e-05, 3.37075238e-04, 2.77165999e-03],\n", - " [4.24169732e-04, 3.51630406e-05, 5.06967717e-06, ...,\n", - " 5.14997964e-06, 3.60815611e-05, 4.70222265e-04],\n", - " [9.61356855e-05, 5.18314391e-06, 7.80053995e-07, ...,\n", - " 2.31303488e-06, 1.90853134e-05, 3.29483621e-04],\n", - " ...,\n", - " [8.78510036e-06, 3.06934055e-07, 1.15704395e-07, ...,\n", - " 4.02947308e-07, 
3.91793219e-06, 1.21722005e-04],\n", - " [3.12283155e-05, 1.88902447e-06, 9.24211236e-07, ...,\n", - " 1.67769019e-06, 1.39318608e-05, 3.06272268e-04],\n", - " [9.07270878e-04, 9.94062648e-05, 5.61416928e-05, ...,\n", - " 5.73289071e-05, 2.48869706e-04, 2.29152758e-03]],\n", - "\n", - " [[5.91580058e-03, 1.10559561e-03, 3.32048250e-04, ...,\n", - " 2.32103208e-04, 6.34685624e-04, 4.07362822e-03],\n", - " [9.84633924e-04, 1.30795612e-04, 2.20851525e-05, ...,\n", - " 1.67520193e-05, 1.00321871e-04, 8.62589630e-04],\n", - " [2.57799606e-04, 2.33190221e-05, 3.75139280e-06, ...,\n", - " 7.50424260e-06, 5.11594008e-05, 5.78730425e-04],\n", - " ...,\n", - " [1.69782816e-05, 6.66840947e-07, 1.38846460e-07, ...,\n", - " 1.80018546e-07, 2.14967281e-06, 9.65387953e-05],\n", - " [5.67699681e-05, 3.36161565e-06, 1.02050706e-06, ...,\n", - " 1.21392122e-06, 1.14076483e-05, 2.88846204e-04],\n", - " [1.40678498e-03, 1.51812113e-04, 6.07411748e-05, ...,\n", - " 5.12399092e-05, 2.33667044e-04, 2.21933913e-03]],\n", - "\n", - " [[2.29063556e-02, 6.30153902e-03, 2.76722992e-03, ...,\n", - " 2.20514974e-03, 4.18286538e-03, 1.75831020e-02],\n", - " [4.68514580e-03, 7.92992418e-04, 3.06714675e-04, ...,\n", - " 2.61121342e-04, 7.31239910e-04, 4.39845258e-03],\n", - " [1.91156555e-03, 2.52487953e-04, 1.00966608e-04, ...,\n", - " 1.51911445e-04, 4.64357348e-04, 3.32914991e-03],\n", - " ...,\n", - " [7.02249992e-04, 5.80488013e-05, 1.94775675e-05, ...,\n", - " 8.09186895e-06, 4.56816706e-05, 9.44769825e-04],\n", - " [1.51418359e-03, 1.61546574e-04, 7.56840236e-05, ...,\n", - " 3.58892830e-05, 1.60053183e-04, 2.05396442e-03],\n", - " [1.25697535e-02, 2.19693198e-03, 1.26698730e-03, ...,\n", - " 6.89297158e-04, 1.90667005e-03, 1.04174931e-02]]]]],\n", - " dtype=float32)" - ] - }, - "execution_count": 24, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "out.members['output0'].data.to_numpy()" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'output0': }" - ] - }, - "execution_count": 17, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "out.members." 
- ] - }, - { - "cell_type": "code", - "execution_count": 34, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([[[ 2, 3, 0, ..., 1, 3, 0],\n", - " [ 0, 0, 0, ..., 0, 1, 0],\n", - " [ 3, 3, 2, ..., 0, 1, 0],\n", - " ...,\n", - " [ 3, 4, 2, ..., 1, 2, 1],\n", - " [ 4, 1, 2, ..., 6, 5, 3],\n", - " [ 3, 2, 2, ..., 4, 5, 1]],\n", - "\n", - " [[ 2, 2, 4, ..., 5, 8, 9],\n", - " [ 2, 1, 6, ..., 1, 0, 2],\n", - " [ 5, 3, 0, ..., 3, 6, 5],\n", - " ...,\n", - " [ 1, 5, 8, ..., 2, 1, 1],\n", - " [ 3, 3, 6, ..., 2, 3, 3],\n", - " [ 0, 0, 0, ..., 3, 2, 2]],\n", - "\n", - " [[ 1, 5, 3, ..., 1, 3, 1],\n", - " [ 5, 3, 8, ..., 2, 5, 6],\n", - " [ 1, 2, 5, ..., 2, 1, 3],\n", - " ...,\n", - " [ 0, 0, 3, ..., 3, 2, 3],\n", - " [ 5, 2, 2, ..., 2, 2, 1],\n", - " [ 6, 2, 3, ..., 8, 2, 6]],\n", - "\n", - " ...,\n", - "\n", - " [[ 3, 7, 6, ..., 2, 9, 7],\n", - " [ 5, 9, 8, ..., 2, 2, 5],\n", - " [ 4, 1, 2, ..., 6, 6, 3],\n", - " ...,\n", - " [ 0, 3, 6, ..., 2, 3, 3],\n", - " [ 2, 1, 3, ..., 0, 3, 2],\n", - " [ 5, 5, 1, ..., 4, 1, 5]],\n", - "\n", - " [[ 9, 6, 5, ..., 2, 3, 3],\n", - " [ 6, 4, 3, ..., 4, 2, 0],\n", - " [ 3, 4, 6, ..., 3, 5, 3],\n", - " ...,\n", - " [ 4, 2, 4, ..., 2, 6, 4],\n", - " [ 0, 0, 4, ..., 0, 2, 2],\n", - " [26, 0, 0, ..., 2, 2, 2]],\n", - "\n", - " [[ 4, 2, 4, ..., 0, 1, 1],\n", - " [ 1, 1, 1, ..., 0, 2, 1],\n", - " [ 2, 3, 4, ..., 6, 1, 2],\n", - " ...,\n", - " [ 2, 1, 2, ..., 2, 1, 2],\n", - " [ 4, 6, 3, ..., 1, 1, 2],\n", - " [ 1, 1, 2, ..., 1, 0, 0]]], dtype=uint16)" - ] - }, - "execution_count": 34, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "raw" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [], - "source": [ - "from plantseg.io import create_tiff\n", - "from plantseg.io.voxelsize import VoxelSize" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [], - "source": [ - "create_tiff(Path('/Users/qin/Documents/Work/small_3D_crop.tif'), raw, VoxelSize(voxels_size=(1, 1, 1)))" - ] - }, - { - "cell_type": "code", - "execution_count": 42, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "VoxelSize(voxels_size=None, unit='um')" - ] - }, - "execution_count": 42, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "VoxelSize()" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [], - "source": [ - "from bioimageio.spec import load_model_description\n", - "from bioimageio.core.axis import AxisId" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[32m2024-12-16 14:36:24.489\u001b[0m | \u001b[1mINFO \u001b[0m | \u001b[36mbioimageio.spec._internal.io_utils\u001b[0m:\u001b[36mopen_bioimageio_yaml\u001b[0m:\u001b[36m131\u001b[0m - \u001b[1mloading philosophical-panda from https://uk1s3.embassy.ebi.ac.uk/public-datasets/bioimage.io/philosophical-panda/0.0.11/files/rdf.yaml\u001b[0m\n", - "computing SHA256 of 101853864f8c8e986b2819c9ac44d0f9-test_input.npy (result: 6810255f5b5260fe39153f2192bedf30d9899ec4e770976b7813116c467579f0): 100%|██████████| 5529728/5529728 [00:00<00:00, 1344776498.48it/s]\n", - "computing SHA256 of 57b964a123db3ff8400bcce3ef902b18-test_output.npy (result: d802e3024da80bff93a9ec50fbe50b9c3946534aab1b60b911511111a8e2dbca): 100%|██████████| 8294528/8294528 [00:00<00:00, 1573628187.47it/s]\n", - "computing SHA256 of 
0b712fa8abf8707021a71747e726bf7c-test_style.npy (result: ab464b406f9050561b40f7d76700ab5edf3aca97e31fe9a6069a51aeeca8bc81): 100%|██████████| 76928/76928 [00:00<00:00, 213681733.85it/s]\n", - "computing SHA256 of d0abd68ef3844b0d6fbed811e0fed878-test_downsampled_0.npy (result: 67df53fb440e94dbb9c8e4003dcbde158646a7975c4878cacdd251e1fcfb4225): 100%|██████████| 88473728/88473728 [00:00<00:00, 2695492167.77it/s]\n", - "computing SHA256 of 3765cac1d92a49daf0d6ec949919aeb1-test_downsampled_1.npy (result: cb4addbd763d96731ebd18ed001b87ab7195ec9198f01a753a363a06c27bfb1c): 100%|██████████| 44236928/44236928 [00:00<00:00, 2611701702.60it/s]\n", - "computing SHA256 of 7251078a2afa8713384a3103878dd09d-test_downsampled_2.npy (result: 9c0225b94d84fcc3adfb9a73eef1303d6adb318b57a5a801e0e2e1638b458e72): 100%|██████████| 22118528/22118528 [00:00<00:00, 2409657934.14it/s]\n", - "computing SHA256 of e434cdc3ea3e7ecfb752cdc001617875-test_downsampled_3.npy (result: 1ea789ff37d47197c847b585799f7d063e7592b0c5e9c3094fd0e3ac209b7fc2): 100%|██████████| 11059328/11059328 [00:00<00:00, 2449888225.82it/s]\n", - "computing SHA256 of 00bd170d6c9de6a391d6869f59058847-cpnet_wrapper.py (result: b8b947cdd0ea8f5b98bd7be5f12f38bb1ea1ebe0b455c62d9a6389cd21d134bf): 100%|██████████| 11053/11053 [00:00<00:00, 34315057.08it/s]\n", - "computing SHA256 of 8dbb20d5a3cb3a3dfdb5101a671861ce-cp_state_dict_1135_gold.pth (result: 26c277f3b8f6ca5aab30b4b0a832601aea60183cbed1c2333576f4135a643eb2): 100%|██████████| 26556687/26556687 [00:00<00:00, 2154733982.88it/s]\n", - "computing SHA256 of 17fee110c39ccad7c3cb36d00d2fdd2c-cp_traced_1135_gold.pt (result: f61bae146ab522902350eadda1d509ac1037726fe6d7fb63f6a8a314021d63e7): 100%|██████████| 26812339/26812339 [00:00<00:00, 2652775239.24it/s]\n" - ] - } - ], - "source": [ - "model_id = 'philosophical-panda'\n", - "# model_id = 'emotional-cricket'\n", - "model = load_model_description(model_id)" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/Users/qin/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/bioimageio/spec/_internal/io.py:351: UserWarning: dumping with mode='python' is currently not fully supported for fields that are included when packaging; returned objects are standard python objects\n", - " warnings.warn(\n", - "/Users/qin/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/pydantic/_internal/_serializers.py:42: UserWarning: Pydantic serializer warnings:\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", - " v = handler(item, index)\n", - "/Users/qin/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/pydantic/_internal/_serializers.py:42: UserWarning: Pydantic serializer warnings:\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as 
expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", - " v = handler(item, index)\n", - "/Users/qin/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/pydantic/_internal/_serializers.py:42: UserWarning: Pydantic serializer warnings:\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", - " v = handler(item, index)\n", - "/Users/qin/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/pydantic/_internal/_serializers.py:42: UserWarning: Pydantic serializer warnings:\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=2.0)` - serialized value may not be as expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=2.0)` - serialized value may not be as expected\n", - " v = handler(item, index)\n", - "/Users/qin/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/pydantic/_internal/_serializers.py:42: UserWarning: Pydantic serializer warnings:\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", - " 
PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=2.0)` - serialized value may not be as expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=2.0)` - serialized value may not be as expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=2.0)` - serialized value may not be as expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=2.0)` - serialized value may not be as expected\n", - " v = handler(item, index)\n", - "/Users/qin/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/pydantic/_internal/_serializers.py:42: UserWarning: Pydantic serializer warnings:\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=4.0)` - serialized value may not be as expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=4.0)` - serialized value may not be as expected\n", - " v = handler(item, index)\n", - "/Users/qin/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/pydantic/_internal/_serializers.py:42: UserWarning: Pydantic serializer warnings:\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=4.0)` - serialized value may not be as expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=4.0)` - serialized value may not be as expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=4.0)` - serialized value may not be as expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=4.0)` - serialized value may not be as expected\n", - " v = handler(item, index)\n", - "/Users/qin/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/pydantic/_internal/_serializers.py:42: UserWarning: Pydantic serializer warnings:\n", - " PydanticSerializationUnexpectedValue: 
Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=8.0)` - serialized value may not be as expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=8.0)` - serialized value may not be as expected\n", - " v = handler(item, index)\n", - "/Users/qin/micromamba/envs/plant-seg-dev/lib/python3.12/site-packages/pydantic/_internal/_serializers.py:42: UserWarning: Pydantic serializer warnings:\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=1.0)` - serialized value may not be as expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=8.0)` - serialized value may not be as expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=8.0)` - serialized value may not be as expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxis` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=8.0)` - serialized value may not be as expected\n", - " PydanticSerializationUnexpectedValue: Expected `TimeOutputAxisWithHalo` but got `SpaceOutputAxis` with value `SpaceOutputAxis(size=Size...', unit=None, scale=8.0)` - serialized value may not be as expected\n", - " v = handler(item, index)\n" - ] - }, - { - "data": { - "text/plain": [ - "{'name': 'Cellpose Plant Nuclei ResNet',\n", - " 'description': 'An experimental Cellpose nuclear model fine-tuned on ovules 1136, 1137, 1139, 1170 and tested on ovules 1135 (see reference for dataset details). 
A model for BioImage.IO team to test and develop post-processing tools.',\n", - " 'covers': [PosixPath('cellpose_raw_and_segmentation.jpg'),\n", - " PosixPath('cellpose_raw_and_probability.jpg'),\n", - " PosixPath('cellpose_raw.jpg')],\n", - " 'id_emoji': '🐼',\n", - " 'authors': [{'affiliation': 'EMBL',\n", - " 'email': None,\n", - " 'orcid': '0000-0002-4652-0795',\n", - " 'name': 'Qin Yu',\n", - " 'github_user': 'qin-yu'}],\n", - " 'attachments': [],\n", - " 'cite': [{'text': 'For more details of the model itself, see the manuscript',\n", - " 'doi': '10.1101/2024.02.19.580954',\n", - " 'url': None}],\n", - " 'license': 'MIT',\n", - " 'config': {'bioimageio': {'thumbnails': {'cellpose_raw.jpg': 'cellpose_raw.thumbnail.png',\n", - " 'cellpose_raw_and_probability.jpg': 'cellpose_raw_and_probability.thumbnail.png',\n", - " 'cellpose_raw_and_segmentation.jpg': 'cellpose_raw_and_segmentation.thumbnail.png'}}},\n", - " 'git_repo': 'https://github.com/kreshuklab/go-nuclear',\n", - " 'icon': None,\n", - " 'links': [],\n", - " 'uploader': {'email': 'qin.yu.95@outlook.com', 'name': 'Qin Yu'},\n", - " 'maintainers': [],\n", - " 'tags': ['cellpose', '3d', '2d', 'nuclei'],\n", - " 'version': '0.0.11',\n", - " 'format_version': '0.5.3',\n", - " 'type': 'model',\n", - " 'id': 'philosophical-panda',\n", - " 'documentation': PosixPath('README.md'),\n", - " 'inputs': [{'id': 'raw',\n", - " 'description': '',\n", - " 'axes': [{'size': {'min': 1, 'step': 1},\n", - " 'id': 'z',\n", - " 'description': '',\n", - " 'type': 'space',\n", - " 'unit': None,\n", - " 'scale': 1.0,\n", - " 'concatenable': False},\n", - " {'id': 'channel',\n", - " 'description': '',\n", - " 'type': 'channel',\n", - " 'channel_names': ['c1', 'c2']},\n", - " {'size': {'min': 16, 'step': 16},\n", - " 'id': 'y',\n", - " 'description': '',\n", - " 'type': 'space',\n", - " 'unit': None,\n", - " 'scale': 1.0,\n", - " 'concatenable': False},\n", - " {'size': {'min': 16, 'step': 16},\n", - " 'id': 'x',\n", - " 'description': '',\n", - " 'type': 'space',\n", - " 'unit': None,\n", - " 'scale': 1.0,\n", - " 'concatenable': False}],\n", - " 'test_tensor': {'source': PosixPath('test_input.npy'),\n", - " 'sha256': '6810255f5b5260fe39153f2192bedf30d9899ec4e770976b7813116c467579f0'},\n", - " 'sample_tensor': None,\n", - " 'data': {'type': 'float32',\n", - " 'range': (None, None),\n", - " 'unit': 'arbitrary unit',\n", - " 'scale': 1.0,\n", - " 'offset': None},\n", - " 'optional': False,\n", - " 'preprocessing': [{'id': 'ensure_dtype', 'kwargs': {'dtype': 'float32'}}]}],\n", - " 'outputs': [{'id': 'flow',\n", - " 'description': '',\n", - " 'axes': [{'size': {'tensor_id': 'raw', 'axis_id': 'z', 'offset': 0},\n", - " 'id': 'z',\n", - " 'description': '',\n", - " 'type': 'space',\n", - " 'unit': None,\n", - " 'scale': 1.0},\n", - " {'id': 'channel',\n", - " 'description': '',\n", - " 'type': 'channel',\n", - " 'channel_names': ['flow1', 'flow2', 'flow3']},\n", - " {'size': {'tensor_id': 'raw', 'axis_id': 'y', 'offset': 0},\n", - " 'id': 'y',\n", - " 'description': '',\n", - " 'type': 'space',\n", - " 'unit': None,\n", - " 'scale': 1.0},\n", - " {'size': {'tensor_id': 'raw', 'axis_id': 'x', 'offset': 0},\n", - " 'id': 'x',\n", - " 'description': '',\n", - " 'type': 'space',\n", - " 'unit': None,\n", - " 'scale': 1.0}],\n", - " 'test_tensor': {'source': PosixPath('test_output.npy'),\n", - " 'sha256': 'd802e3024da80bff93a9ec50fbe50b9c3946534aab1b60b911511111a8e2dbca'},\n", - " 'sample_tensor': None,\n", - " 'data': {'type': 'float32',\n", - " 'range': (None, 
None),\n", - " 'unit': 'arbitrary unit',\n", - " 'scale': 1.0,\n", - " 'offset': None},\n", - " 'postprocessing': [{'id': 'ensure_dtype', 'kwargs': {'dtype': 'float32'}}]},\n", - " {'id': 'style',\n", - " 'description': '',\n", - " 'axes': [{'size': {'tensor_id': 'raw', 'axis_id': 'z', 'offset': 0},\n", - " 'id': 'z',\n", - " 'description': '',\n", - " 'type': 'space',\n", - " 'unit': None,\n", - " 'scale': 1.0},\n", - " {'id': 'channel',\n", - " 'description': '',\n", - " 'type': 'channel',\n", - " 'channel_names': ['feature1',\n", - " 'feature2',\n", - " 'feature3',\n", - " 'feature4',\n", - " 'feature5',\n", - " 'feature6',\n", - " 'feature7',\n", - " 'feature8',\n", - " 'feature9',\n", - " 'feature10',\n", - " 'feature11',\n", - " 'feature12',\n", - " 'feature13',\n", - " 'feature14',\n", - " 'feature15',\n", - " 'feature16',\n", - " 'feature17',\n", - " 'feature18',\n", - " 'feature19',\n", - " 'feature20',\n", - " 'feature21',\n", - " 'feature22',\n", - " 'feature23',\n", - " 'feature24',\n", - " 'feature25',\n", - " 'feature26',\n", - " 'feature27',\n", - " 'feature28',\n", - " 'feature29',\n", - " 'feature30',\n", - " 'feature31',\n", - " 'feature32',\n", - " 'feature33',\n", - " 'feature34',\n", - " 'feature35',\n", - " 'feature36',\n", - " 'feature37',\n", - " 'feature38',\n", - " 'feature39',\n", - " 'feature40',\n", - " 'feature41',\n", - " 'feature42',\n", - " 'feature43',\n", - " 'feature44',\n", - " 'feature45',\n", - " 'feature46',\n", - " 'feature47',\n", - " 'feature48',\n", - " 'feature49',\n", - " 'feature50',\n", - " 'feature51',\n", - " 'feature52',\n", - " 'feature53',\n", - " 'feature54',\n", - " 'feature55',\n", - " 'feature56',\n", - " 'feature57',\n", - " 'feature58',\n", - " 'feature59',\n", - " 'feature60',\n", - " 'feature61',\n", - " 'feature62',\n", - " 'feature63',\n", - " 'feature64',\n", - " 'feature65',\n", - " 'feature66',\n", - " 'feature67',\n", - " 'feature68',\n", - " 'feature69',\n", - " 'feature70',\n", - " 'feature71',\n", - " 'feature72',\n", - " 'feature73',\n", - " 'feature74',\n", - " 'feature75',\n", - " 'feature76',\n", - " 'feature77',\n", - " 'feature78',\n", - " 'feature79',\n", - " 'feature80',\n", - " 'feature81',\n", - " 'feature82',\n", - " 'feature83',\n", - " 'feature84',\n", - " 'feature85',\n", - " 'feature86',\n", - " 'feature87',\n", - " 'feature88',\n", - " 'feature89',\n", - " 'feature90',\n", - " 'feature91',\n", - " 'feature92',\n", - " 'feature93',\n", - " 'feature94',\n", - " 'feature95',\n", - " 'feature96',\n", - " 'feature97',\n", - " 'feature98',\n", - " 'feature99',\n", - " 'feature100',\n", - " 'feature101',\n", - " 'feature102',\n", - " 'feature103',\n", - " 'feature104',\n", - " 'feature105',\n", - " 'feature106',\n", - " 'feature107',\n", - " 'feature108',\n", - " 'feature109',\n", - " 'feature110',\n", - " 'feature111',\n", - " 'feature112',\n", - " 'feature113',\n", - " 'feature114',\n", - " 'feature115',\n", - " 'feature116',\n", - " 'feature117',\n", - " 'feature118',\n", - " 'feature119',\n", - " 'feature120',\n", - " 'feature121',\n", - " 'feature122',\n", - " 'feature123',\n", - " 'feature124',\n", - " 'feature125',\n", - " 'feature126',\n", - " 'feature127',\n", - " 'feature128',\n", - " 'feature129',\n", - " 'feature130',\n", - " 'feature131',\n", - " 'feature132',\n", - " 'feature133',\n", - " 'feature134',\n", - " 'feature135',\n", - " 'feature136',\n", - " 'feature137',\n", - " 'feature138',\n", - " 'feature139',\n", - " 'feature140',\n", - " 'feature141',\n", - " 'feature142',\n", - " 
'feature143',\n", - " 'feature144',\n", - " 'feature145',\n", - " 'feature146',\n", - " 'feature147',\n", - " 'feature148',\n", - " 'feature149',\n", - " 'feature150',\n", - " 'feature151',\n", - " 'feature152',\n", - " 'feature153',\n", - " 'feature154',\n", - " 'feature155',\n", - " 'feature156',\n", - " 'feature157',\n", - " 'feature158',\n", - " 'feature159',\n", - " 'feature160',\n", - " 'feature161',\n", - " 'feature162',\n", - " 'feature163',\n", - " 'feature164',\n", - " 'feature165',\n", - " 'feature166',\n", - " 'feature167',\n", - " 'feature168',\n", - " 'feature169',\n", - " 'feature170',\n", - " 'feature171',\n", - " 'feature172',\n", - " 'feature173',\n", - " 'feature174',\n", - " 'feature175',\n", - " 'feature176',\n", - " 'feature177',\n", - " 'feature178',\n", - " 'feature179',\n", - " 'feature180',\n", - " 'feature181',\n", - " 'feature182',\n", - " 'feature183',\n", - " 'feature184',\n", - " 'feature185',\n", - " 'feature186',\n", - " 'feature187',\n", - " 'feature188',\n", - " 'feature189',\n", - " 'feature190',\n", - " 'feature191',\n", - " 'feature192',\n", - " 'feature193',\n", - " 'feature194',\n", - " 'feature195',\n", - " 'feature196',\n", - " 'feature197',\n", - " 'feature198',\n", - " 'feature199',\n", - " 'feature200',\n", - " 'feature201',\n", - " 'feature202',\n", - " 'feature203',\n", - " 'feature204',\n", - " 'feature205',\n", - " 'feature206',\n", - " 'feature207',\n", - " 'feature208',\n", - " 'feature209',\n", - " 'feature210',\n", - " 'feature211',\n", - " 'feature212',\n", - " 'feature213',\n", - " 'feature214',\n", - " 'feature215',\n", - " 'feature216',\n", - " 'feature217',\n", - " 'feature218',\n", - " 'feature219',\n", - " 'feature220',\n", - " 'feature221',\n", - " 'feature222',\n", - " 'feature223',\n", - " 'feature224',\n", - " 'feature225',\n", - " 'feature226',\n", - " 'feature227',\n", - " 'feature228',\n", - " 'feature229',\n", - " 'feature230',\n", - " 'feature231',\n", - " 'feature232',\n", - " 'feature233',\n", - " 'feature234',\n", - " 'feature235',\n", - " 'feature236',\n", - " 'feature237',\n", - " 'feature238',\n", - " 'feature239',\n", - " 'feature240',\n", - " 'feature241',\n", - " 'feature242',\n", - " 'feature243',\n", - " 'feature244',\n", - " 'feature245',\n", - " 'feature246',\n", - " 'feature247',\n", - " 'feature248',\n", - " 'feature249',\n", - " 'feature250',\n", - " 'feature251',\n", - " 'feature252',\n", - " 'feature253',\n", - " 'feature254',\n", - " 'feature255',\n", - " 'feature256']}],\n", - " 'test_tensor': {'source': PosixPath('test_style.npy'),\n", - " 'sha256': 'ab464b406f9050561b40f7d76700ab5edf3aca97e31fe9a6069a51aeeca8bc81'},\n", - " 'sample_tensor': None,\n", - " 'data': {'type': 'float32',\n", - " 'range': (None, None),\n", - " 'unit': 'arbitrary unit',\n", - " 'scale': 1.0,\n", - " 'offset': None},\n", - " 'postprocessing': [{'id': 'ensure_dtype', 'kwargs': {'dtype': 'float32'}}]},\n", - " {'id': 'downsampled_0',\n", - " 'description': '',\n", - " 'axes': [{'size': {'tensor_id': 'raw', 'axis_id': 'z', 'offset': 0},\n", - " 'id': 'z',\n", - " 'description': '',\n", - " 'type': 'space',\n", - " 'unit': None,\n", - " 'scale': 1.0},\n", - " {'id': 'channel',\n", - " 'description': '',\n", - " 'type': 'channel',\n", - " 'channel_names': ['feature1',\n", - " 'feature2',\n", - " 'feature3',\n", - " 'feature4',\n", - " 'feature5',\n", - " 'feature6',\n", - " 'feature7',\n", - " 'feature8',\n", - " 'feature9',\n", - " 'feature10',\n", - " 'feature11',\n", - " 'feature12',\n", - " 'feature13',\n", - " 
'feature14',\n", - " 'feature15',\n", - " 'feature16',\n", - " 'feature17',\n", - " 'feature18',\n", - " 'feature19',\n", - " 'feature20',\n", - " 'feature21',\n", - " 'feature22',\n", - " 'feature23',\n", - " 'feature24',\n", - " 'feature25',\n", - " 'feature26',\n", - " 'feature27',\n", - " 'feature28',\n", - " 'feature29',\n", - " 'feature30',\n", - " 'feature31',\n", - " 'feature32']},\n", - " {'size': {'tensor_id': 'raw', 'axis_id': 'y', 'offset': 0},\n", - " 'id': 'y',\n", - " 'description': '',\n", - " 'type': 'space',\n", - " 'unit': None,\n", - " 'scale': 1.0},\n", - " {'size': {'tensor_id': 'raw', 'axis_id': 'x', 'offset': 0},\n", - " 'id': 'x',\n", - " 'description': '',\n", - " 'type': 'space',\n", - " 'unit': None,\n", - " 'scale': 1.0}],\n", - " 'test_tensor': {'source': PosixPath('test_downsampled_0.npy'),\n", - " 'sha256': '67df53fb440e94dbb9c8e4003dcbde158646a7975c4878cacdd251e1fcfb4225'},\n", - " 'sample_tensor': None,\n", - " 'data': {'type': 'float32',\n", - " 'range': (None, None),\n", - " 'unit': 'arbitrary unit',\n", - " 'scale': 1.0,\n", - " 'offset': None},\n", - " 'postprocessing': [{'id': 'ensure_dtype', 'kwargs': {'dtype': 'float32'}}]},\n", - " {'id': 'downsampled_1',\n", - " 'description': '',\n", - " 'axes': [{'size': {'tensor_id': 'raw', 'axis_id': 'z', 'offset': 0},\n", - " 'id': 'z',\n", - " 'description': '',\n", - " 'type': 'space',\n", - " 'unit': None,\n", - " 'scale': 1.0},\n", - " {'id': 'channel',\n", - " 'description': '',\n", - " 'type': 'channel',\n", - " 'channel_names': ['feature1',\n", - " 'feature2',\n", - " 'feature3',\n", - " 'feature4',\n", - " 'feature5',\n", - " 'feature6',\n", - " 'feature7',\n", - " 'feature8',\n", - " 'feature9',\n", - " 'feature10',\n", - " 'feature11',\n", - " 'feature12',\n", - " 'feature13',\n", - " 'feature14',\n", - " 'feature15',\n", - " 'feature16',\n", - " 'feature17',\n", - " 'feature18',\n", - " 'feature19',\n", - " 'feature20',\n", - " 'feature21',\n", - " 'feature22',\n", - " 'feature23',\n", - " 'feature24',\n", - " 'feature25',\n", - " 'feature26',\n", - " 'feature27',\n", - " 'feature28',\n", - " 'feature29',\n", - " 'feature30',\n", - " 'feature31',\n", - " 'feature32',\n", - " 'feature33',\n", - " 'feature34',\n", - " 'feature35',\n", - " 'feature36',\n", - " 'feature37',\n", - " 'feature38',\n", - " 'feature39',\n", - " 'feature40',\n", - " 'feature41',\n", - " 'feature42',\n", - " 'feature43',\n", - " 'feature44',\n", - " 'feature45',\n", - " 'feature46',\n", - " 'feature47',\n", - " 'feature48',\n", - " 'feature49',\n", - " 'feature50',\n", - " 'feature51',\n", - " 'feature52',\n", - " 'feature53',\n", - " 'feature54',\n", - " 'feature55',\n", - " 'feature56',\n", - " 'feature57',\n", - " 'feature58',\n", - " 'feature59',\n", - " 'feature60',\n", - " 'feature61',\n", - " 'feature62',\n", - " 'feature63',\n", - " 'feature64']},\n", - " {'size': {'tensor_id': 'raw', 'axis_id': 'y', 'offset': 0},\n", - " 'id': 'y',\n", - " 'description': '',\n", - " 'type': 'space',\n", - " 'unit': None,\n", - " 'scale': 2.0},\n", - " {'size': {'tensor_id': 'raw', 'axis_id': 'x', 'offset': 0},\n", - " 'id': 'x',\n", - " 'description': '',\n", - " 'type': 'space',\n", - " 'unit': None,\n", - " 'scale': 2.0}],\n", - " 'test_tensor': {'source': PosixPath('test_downsampled_1.npy'),\n", - " 'sha256': 'cb4addbd763d96731ebd18ed001b87ab7195ec9198f01a753a363a06c27bfb1c'},\n", - " 'sample_tensor': None,\n", - " 'data': {'type': 'float32',\n", - " 'range': (None, None),\n", - " 'unit': 'arbitrary unit',\n", - " 'scale': 
1.0,\n", - " 'offset': None},\n", - " 'postprocessing': [{'id': 'ensure_dtype', 'kwargs': {'dtype': 'float32'}}]},\n", - " {'id': 'downsampled_2',\n", - " 'description': '',\n", - " 'axes': [{'size': {'tensor_id': 'raw', 'axis_id': 'z', 'offset': 0},\n", - " 'id': 'z',\n", - " 'description': '',\n", - " 'type': 'space',\n", - " 'unit': None,\n", - " 'scale': 1.0},\n", - " {'id': 'channel',\n", - " 'description': '',\n", - " 'type': 'channel',\n", - " 'channel_names': ['feature1',\n", - " 'feature2',\n", - " 'feature3',\n", - " 'feature4',\n", - " 'feature5',\n", - " 'feature6',\n", - " 'feature7',\n", - " 'feature8',\n", - " 'feature9',\n", - " 'feature10',\n", - " 'feature11',\n", - " 'feature12',\n", - " 'feature13',\n", - " 'feature14',\n", - " 'feature15',\n", - " 'feature16',\n", - " 'feature17',\n", - " 'feature18',\n", - " 'feature19',\n", - " 'feature20',\n", - " 'feature21',\n", - " 'feature22',\n", - " 'feature23',\n", - " 'feature24',\n", - " 'feature25',\n", - " 'feature26',\n", - " 'feature27',\n", - " 'feature28',\n", - " 'feature29',\n", - " 'feature30',\n", - " 'feature31',\n", - " 'feature32',\n", - " 'feature33',\n", - " 'feature34',\n", - " 'feature35',\n", - " 'feature36',\n", - " 'feature37',\n", - " 'feature38',\n", - " 'feature39',\n", - " 'feature40',\n", - " 'feature41',\n", - " 'feature42',\n", - " 'feature43',\n", - " 'feature44',\n", - " 'feature45',\n", - " 'feature46',\n", - " 'feature47',\n", - " 'feature48',\n", - " 'feature49',\n", - " 'feature50',\n", - " 'feature51',\n", - " 'feature52',\n", - " 'feature53',\n", - " 'feature54',\n", - " 'feature55',\n", - " 'feature56',\n", - " 'feature57',\n", - " 'feature58',\n", - " 'feature59',\n", - " 'feature60',\n", - " 'feature61',\n", - " 'feature62',\n", - " 'feature63',\n", - " 'feature64',\n", - " 'feature65',\n", - " 'feature66',\n", - " 'feature67',\n", - " 'feature68',\n", - " 'feature69',\n", - " 'feature70',\n", - " 'feature71',\n", - " 'feature72',\n", - " 'feature73',\n", - " 'feature74',\n", - " 'feature75',\n", - " 'feature76',\n", - " 'feature77',\n", - " 'feature78',\n", - " 'feature79',\n", - " 'feature80',\n", - " 'feature81',\n", - " 'feature82',\n", - " 'feature83',\n", - " 'feature84',\n", - " 'feature85',\n", - " 'feature86',\n", - " 'feature87',\n", - " 'feature88',\n", - " 'feature89',\n", - " 'feature90',\n", - " 'feature91',\n", - " 'feature92',\n", - " 'feature93',\n", - " 'feature94',\n", - " 'feature95',\n", - " 'feature96',\n", - " 'feature97',\n", - " 'feature98',\n", - " 'feature99',\n", - " 'feature100',\n", - " 'feature101',\n", - " 'feature102',\n", - " 'feature103',\n", - " 'feature104',\n", - " 'feature105',\n", - " 'feature106',\n", - " 'feature107',\n", - " 'feature108',\n", - " 'feature109',\n", - " 'feature110',\n", - " 'feature111',\n", - " 'feature112',\n", - " 'feature113',\n", - " 'feature114',\n", - " 'feature115',\n", - " 'feature116',\n", - " 'feature117',\n", - " 'feature118',\n", - " 'feature119',\n", - " 'feature120',\n", - " 'feature121',\n", - " 'feature122',\n", - " 'feature123',\n", - " 'feature124',\n", - " 'feature125',\n", - " 'feature126',\n", - " 'feature127',\n", - " 'feature128']},\n", - " {'size': {'tensor_id': 'raw', 'axis_id': 'y', 'offset': 0},\n", - " 'id': 'y',\n", - " 'description': '',\n", - " 'type': 'space',\n", - " 'unit': None,\n", - " 'scale': 4.0},\n", - " {'size': {'tensor_id': 'raw', 'axis_id': 'x', 'offset': 0},\n", - " 'id': 'x',\n", - " 'description': '',\n", - " 'type': 'space',\n", - " 'unit': None,\n", - " 'scale': 4.0}],\n", - " 
'test_tensor': {'source': PosixPath('test_downsampled_2.npy'),\n", - " 'sha256': '9c0225b94d84fcc3adfb9a73eef1303d6adb318b57a5a801e0e2e1638b458e72'},\n", - " 'sample_tensor': None,\n", - " 'data': {'type': 'float32',\n", - " 'range': (None, None),\n", - " 'unit': 'arbitrary unit',\n", - " 'scale': 1.0,\n", - " 'offset': None},\n", - " 'postprocessing': [{'id': 'ensure_dtype', 'kwargs': {'dtype': 'float32'}}]},\n", - " {'id': 'downsampled_3',\n", - " 'description': '',\n", - " 'axes': [{'size': {'tensor_id': 'raw', 'axis_id': 'z', 'offset': 0},\n", - " 'id': 'z',\n", - " 'description': '',\n", - " 'type': 'space',\n", - " 'unit': None,\n", - " 'scale': 1.0},\n", - " {'id': 'channel',\n", - " 'description': '',\n", - " 'type': 'channel',\n", - " 'channel_names': ['feature1',\n", - " 'feature2',\n", - " 'feature3',\n", - " 'feature4',\n", - " 'feature5',\n", - " 'feature6',\n", - " 'feature7',\n", - " 'feature8',\n", - " 'feature9',\n", - " 'feature10',\n", - " 'feature11',\n", - " 'feature12',\n", - " 'feature13',\n", - " 'feature14',\n", - " 'feature15',\n", - " 'feature16',\n", - " 'feature17',\n", - " 'feature18',\n", - " 'feature19',\n", - " 'feature20',\n", - " 'feature21',\n", - " 'feature22',\n", - " 'feature23',\n", - " 'feature24',\n", - " 'feature25',\n", - " 'feature26',\n", - " 'feature27',\n", - " 'feature28',\n", - " 'feature29',\n", - " 'feature30',\n", - " 'feature31',\n", - " 'feature32',\n", - " 'feature33',\n", - " 'feature34',\n", - " 'feature35',\n", - " 'feature36',\n", - " 'feature37',\n", - " 'feature38',\n", - " 'feature39',\n", - " 'feature40',\n", - " 'feature41',\n", - " 'feature42',\n", - " 'feature43',\n", - " 'feature44',\n", - " 'feature45',\n", - " 'feature46',\n", - " 'feature47',\n", - " 'feature48',\n", - " 'feature49',\n", - " 'feature50',\n", - " 'feature51',\n", - " 'feature52',\n", - " 'feature53',\n", - " 'feature54',\n", - " 'feature55',\n", - " 'feature56',\n", - " 'feature57',\n", - " 'feature58',\n", - " 'feature59',\n", - " 'feature60',\n", - " 'feature61',\n", - " 'feature62',\n", - " 'feature63',\n", - " 'feature64',\n", - " 'feature65',\n", - " 'feature66',\n", - " 'feature67',\n", - " 'feature68',\n", - " 'feature69',\n", - " 'feature70',\n", - " 'feature71',\n", - " 'feature72',\n", - " 'feature73',\n", - " 'feature74',\n", - " 'feature75',\n", - " 'feature76',\n", - " 'feature77',\n", - " 'feature78',\n", - " 'feature79',\n", - " 'feature80',\n", - " 'feature81',\n", - " 'feature82',\n", - " 'feature83',\n", - " 'feature84',\n", - " 'feature85',\n", - " 'feature86',\n", - " 'feature87',\n", - " 'feature88',\n", - " 'feature89',\n", - " 'feature90',\n", - " 'feature91',\n", - " 'feature92',\n", - " 'feature93',\n", - " 'feature94',\n", - " 'feature95',\n", - " 'feature96',\n", - " 'feature97',\n", - " 'feature98',\n", - " 'feature99',\n", - " 'feature100',\n", - " 'feature101',\n", - " 'feature102',\n", - " 'feature103',\n", - " 'feature104',\n", - " 'feature105',\n", - " 'feature106',\n", - " 'feature107',\n", - " 'feature108',\n", - " 'feature109',\n", - " 'feature110',\n", - " 'feature111',\n", - " 'feature112',\n", - " 'feature113',\n", - " 'feature114',\n", - " 'feature115',\n", - " 'feature116',\n", - " 'feature117',\n", - " 'feature118',\n", - " 'feature119',\n", - " 'feature120',\n", - " 'feature121',\n", - " 'feature122',\n", - " 'feature123',\n", - " 'feature124',\n", - " 'feature125',\n", - " 'feature126',\n", - " 'feature127',\n", - " 'feature128',\n", - " 'feature129',\n", - " 'feature130',\n", - " 'feature131',\n", - " 
'feature132',\n", - " 'feature133',\n", - " 'feature134',\n", - " 'feature135',\n", - " 'feature136',\n", - " 'feature137',\n", - " 'feature138',\n", - " 'feature139',\n", - " 'feature140',\n", - " 'feature141',\n", - " 'feature142',\n", - " 'feature143',\n", - " 'feature144',\n", - " 'feature145',\n", - " 'feature146',\n", - " 'feature147',\n", - " 'feature148',\n", - " 'feature149',\n", - " 'feature150',\n", - " 'feature151',\n", - " 'feature152',\n", - " 'feature153',\n", - " 'feature154',\n", - " 'feature155',\n", - " 'feature156',\n", - " 'feature157',\n", - " 'feature158',\n", - " 'feature159',\n", - " 'feature160',\n", - " 'feature161',\n", - " 'feature162',\n", - " 'feature163',\n", - " 'feature164',\n", - " 'feature165',\n", - " 'feature166',\n", - " 'feature167',\n", - " 'feature168',\n", - " 'feature169',\n", - " 'feature170',\n", - " 'feature171',\n", - " 'feature172',\n", - " 'feature173',\n", - " 'feature174',\n", - " 'feature175',\n", - " 'feature176',\n", - " 'feature177',\n", - " 'feature178',\n", - " 'feature179',\n", - " 'feature180',\n", - " 'feature181',\n", - " 'feature182',\n", - " 'feature183',\n", - " 'feature184',\n", - " 'feature185',\n", - " 'feature186',\n", - " 'feature187',\n", - " 'feature188',\n", - " 'feature189',\n", - " 'feature190',\n", - " 'feature191',\n", - " 'feature192',\n", - " 'feature193',\n", - " 'feature194',\n", - " 'feature195',\n", - " 'feature196',\n", - " 'feature197',\n", - " 'feature198',\n", - " 'feature199',\n", - " 'feature200',\n", - " 'feature201',\n", - " 'feature202',\n", - " 'feature203',\n", - " 'feature204',\n", - " 'feature205',\n", - " 'feature206',\n", - " 'feature207',\n", - " 'feature208',\n", - " 'feature209',\n", - " 'feature210',\n", - " 'feature211',\n", - " 'feature212',\n", - " 'feature213',\n", - " 'feature214',\n", - " 'feature215',\n", - " 'feature216',\n", - " 'feature217',\n", - " 'feature218',\n", - " 'feature219',\n", - " 'feature220',\n", - " 'feature221',\n", - " 'feature222',\n", - " 'feature223',\n", - " 'feature224',\n", - " 'feature225',\n", - " 'feature226',\n", - " 'feature227',\n", - " 'feature228',\n", - " 'feature229',\n", - " 'feature230',\n", - " 'feature231',\n", - " 'feature232',\n", - " 'feature233',\n", - " 'feature234',\n", - " 'feature235',\n", - " 'feature236',\n", - " 'feature237',\n", - " 'feature238',\n", - " 'feature239',\n", - " 'feature240',\n", - " 'feature241',\n", - " 'feature242',\n", - " 'feature243',\n", - " 'feature244',\n", - " 'feature245',\n", - " 'feature246',\n", - " 'feature247',\n", - " 'feature248',\n", - " 'feature249',\n", - " 'feature250',\n", - " 'feature251',\n", - " 'feature252',\n", - " 'feature253',\n", - " 'feature254',\n", - " 'feature255',\n", - " 'feature256']},\n", - " {'size': {'tensor_id': 'raw', 'axis_id': 'y', 'offset': 0},\n", - " 'id': 'y',\n", - " 'description': '',\n", - " 'type': 'space',\n", - " 'unit': None,\n", - " 'scale': 8.0},\n", - " {'size': {'tensor_id': 'raw', 'axis_id': 'x', 'offset': 0},\n", - " 'id': 'x',\n", - " 'description': '',\n", - " 'type': 'space',\n", - " 'unit': None,\n", - " 'scale': 8.0}],\n", - " 'test_tensor': {'source': PosixPath('test_downsampled_3.npy'),\n", - " 'sha256': '1ea789ff37d47197c847b585799f7d063e7592b0c5e9c3094fd0e3ac209b7fc2'},\n", - " 'sample_tensor': None,\n", - " 'data': {'type': 'float32',\n", - " 'range': (None, None),\n", - " 'unit': 'arbitrary unit',\n", - " 'scale': 1.0,\n", - " 'offset': None},\n", - " 'postprocessing': [{'id': 'ensure_dtype',\n", - " 'kwargs': {'dtype': 'float32'}}]}],\n", - " 
'packaged_by': [],\n", - " 'parent': None,\n", - " 'run_mode': None,\n", - " 'timestamp': datetime.datetime(2024, 12, 16, 13, 13, 2, 698812),\n", - " 'training_data': None,\n", - " 'weights': {'keras_hdf5': None,\n", - " 'onnx': None,\n", - " 'pytorch_state_dict': {'source': PosixPath('cp_state_dict_1135_gold.pth'),\n", - " 'sha256': '26c277f3b8f6ca5aab30b4b0a832601aea60183cbed1c2333576f4135a643eb2',\n", - " 'authors': None,\n", - " 'parent': None,\n", - " 'architecture': {'source': PosixPath('cpnet_wrapper.py'),\n", - " 'sha256': 'b8b947cdd0ea8f5b98bd7be5f12f38bb1ea1ebe0b455c62d9a6389cd21d134bf',\n", - " 'callable': 'CPnetBioImageIO',\n", - " 'kwargs': {'conv_3D': False,\n", - " 'max_pool': True,\n", - " 'mkldnn': False,\n", - " 'nbase': [2, 32, 64, 128, 256],\n", - " 'nout': 3,\n", - " 'sz': 3}},\n", - " 'pytorch_version': '2.3.1',\n", - " 'dependencies': None},\n", - " 'tensorflow_js': None,\n", - " 'tensorflow_saved_model_bundle': None,\n", - " 'torchscript': {'source': PosixPath('cp_traced_1135_gold.pt'),\n", - " 'sha256': 'f61bae146ab522902350eadda1d509ac1037726fe6d7fb63f6a8a314021d63e7',\n", - " 'authors': None,\n", - " 'parent': 'pytorch_state_dict',\n", - " 'pytorch_version': '2.3.1'}}}" - ] - }, - "execution_count": 12, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "model.model_dump()" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "['batch', 'channel', 'z', 'y', 'x']" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "[AxisId(a) if isinstance(a, str) else a.id for a in model.inputs[0].axes]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "{AxisId(a) if isinstance(a, str) else a.id : a.get('size', 1) for a in model.inputs[0].axes}" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "plant-seg-dev", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.12.7" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} From fca44de09f15602310472ccb6245e85f1306795a Mon Sep 17 00:00:00 2001 From: Qin Yu Date: Mon, 16 Dec 2024 22:58:40 +0100 Subject: [PATCH 20/32] fix(ci): fix conda build CI `setuptools` missing etc. 
--- .github/workflows/build-and-publish-docs.yml | 2 +- conda-recipe/meta.yaml | 1 + setup.py | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-and-publish-docs.yml b/.github/workflows/build-and-publish-docs.yml index a8c1163b..d4a33122 100644 --- a/.github/workflows/build-and-publish-docs.yml +++ b/.github/workflows/build-and-publish-docs.yml @@ -3,7 +3,7 @@ on: push: branches: - master - - qy/add-plantseg-v1-installation + - qy/unify-prediction permissions: contents: write diff --git a/conda-recipe/meta.yaml b/conda-recipe/meta.yaml index 8bf014fe..dfc1fca6 100644 --- a/conda-recipe/meta.yaml +++ b/conda-recipe/meta.yaml @@ -17,6 +17,7 @@ requirements: build: - python - pip + - setuptools run: - python >=3.9 diff --git a/setup.py b/setup.py index 710a650f..7235b4dc 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ description='PlantSeg is a tool for cell instance aware segmentation in densely packed 3D volumetric images.', author='Lorenzo Cerrone, Adrian Wolny, Qin Yu', url='https://github.com/kreshuklab/plant-seg', - author_email='lorenzo.cerrone@iwr.uni-heidelberg.de', + author_email='lorenzo.cerrone@uzh.ch, qin.yu@embl.de', entry_points={ 'console_scripts': [ 'plantseg=plantseg.run_plantseg:main', From 74d644e147293d2e1f9c13671f1a04ee4b5def5d Mon Sep 17 00:00:00 2001 From: Qin Yu Date: Mon, 16 Dec 2024 23:10:32 +0100 Subject: [PATCH 21/32] fix(docs): missing index page for API --- mkdocs.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mkdocs.yml b/mkdocs.yml index c9ae3d30..f2438fd2 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -4,7 +4,7 @@ site_description: Cell instance aware segmentation in densely packed 3D volumetr repo_name: kreshuklab/plant-seg repo_url: https://github.com/kreshuklab/plant-seg edit_uri: edit/main/docs/ -copyright: Copyright © 2019 - 2024 Lorenzo Cerrone, Adrian Wolny, Qin Yu +copyright: Copyright © 2019 - 2025 Lorenzo Cerrone, Adrian Wolny, Qin Yu theme: name: material @@ -116,6 +116,7 @@ nav: - Training: chapters/plantseg_models/training.md - API: + - chapters/python_api/index.md - tasks: - plantseg.tasks.io_tasks: chapters/python_api/tasks/io_tasks.md - plantseg.tasks.dataprocessing_tasks: chapters/python_api/tasks/dataprocessing_tasks.md From c6dd2a1d37187b63b7a62ef8023380bd02dfbfd9 Mon Sep 17 00:00:00 2001 From: Qin Yu Date: Tue, 17 Dec 2024 18:20:23 +0100 Subject: [PATCH 22/32] ci: inspect why action no.95 and no.96 can't finish --- .github/workflows/build-and-test-package.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-and-test-package.yml b/.github/workflows/build-and-test-package.yml index bd9c8b14..43a0bd59 100644 --- a/.github/workflows/build-and-test-package.yml +++ b/.github/workflows/build-and-test-package.yml @@ -62,7 +62,7 @@ jobs: shell: bash -l {0} run: | conda activate plant-seg - pytest --cov --cov-report=xml + pytest -s --cov --cov-report=xml conda deactivate # Upload Codecov report From f30089070e36674e732835bc9999c64a48a96cf1 Mon Sep 17 00:00:00 2001 From: Qin Yu Date: Wed, 18 Dec 2024 00:05:26 +0100 Subject: [PATCH 23/32] fix: temp fix for ci hanging on napari GUI --- tests/widgets/test_widget_open_file.py | 6 ++++++ tests/widgets/test_widget_preprocessing.py | 5 +++++ 2 files changed, 11 insertions(+) diff --git a/tests/widgets/test_widget_open_file.py b/tests/widgets/test_widget_open_file.py index dc5a48ca..e0300986 100644 --- a/tests/widgets/test_widget_open_file.py +++ b/tests/widgets/test_widget_open_file.py @@ -1,11 
+1,17 @@ +import os + import napari import numpy as np +import pytest from plantseg.io.h5 import create_h5 from plantseg.io.voxelsize import VoxelSize from plantseg.viewer_napari.widgets.io import PathMode, widget_open_file +IN_GITHUB_ACTIONS = os.getenv("GITHUB_ACTIONS") == "true" # set to true in GitHub Actions by default to skip CUDA tests + +@pytest.mark.skipif(IN_GITHUB_ACTIONS, reason="GUI tests hangs in GitHub Actions.") def test_widget_open_file(make_napari_viewer_proxy, path_h5): viewer = make_napari_viewer_proxy() shape = (10, 10, 10) diff --git a/tests/widgets/test_widget_preprocessing.py b/tests/widgets/test_widget_preprocessing.py index 6b1f8411..0173563e 100644 --- a/tests/widgets/test_widget_preprocessing.py +++ b/tests/widgets/test_widget_preprocessing.py @@ -1,3 +1,5 @@ +import os + import napari import numpy as np import pytest @@ -8,6 +10,8 @@ from plantseg.io.voxelsize import VoxelSize from plantseg.viewer_napari.widgets.dataprocessing import RescaleModes, widget_rescaling +IN_GITHUB_ACTIONS = os.getenv("GITHUB_ACTIONS") == "true" # set to true in GitHub Actions by default to skip CUDA tests + def create_layer_name(name: str, suffix: str): return f"{name}_{suffix}" @@ -47,6 +51,7 @@ def widget_add_image(image: PlantSegImage) -> LayerDataTuple: return image.to_napari_layer_tuple() +@pytest.mark.skipif(IN_GITHUB_ACTIONS, reason="GUI tests hangs in GitHub Actions.") class TestWidgetRescaling: def test_rescaling_from_factor(self, make_napari_viewer_proxy, sample_image): viewer = make_napari_viewer_proxy() From 6deb8c117be005fe4db8df8a61cb3fb8f6eb54a4 Mon Sep 17 00:00:00 2001 From: Qin Yu Date: Wed, 18 Dec 2024 01:13:46 +0100 Subject: [PATCH 24/32] feat: bioimageio.core prediction functional and task --- plantseg/functionals/prediction/__init__.py | 3 +- plantseg/functionals/prediction/prediction.py | 27 +++++----- plantseg/tasks/prediction_tasks.py | 34 ++++++++++++- plantseg/viewer_napari/widgets/prediction.py | 51 +++++++++++-------- 4 files changed, 79 insertions(+), 36 deletions(-) diff --git a/plantseg/functionals/prediction/__init__.py b/plantseg/functionals/prediction/__init__.py index 8ce10d6b..956bd936 100644 --- a/plantseg/functionals/prediction/__init__.py +++ b/plantseg/functionals/prediction/__init__.py @@ -1,6 +1,7 @@ -from plantseg.functionals.prediction.prediction import unet_prediction +from plantseg.functionals.prediction.prediction import biio_prediction, unet_prediction # Use __all__ to let type checkers know what is part of the public API. __all__ = [ "unet_prediction", + "biio_prediction", ] diff --git a/plantseg/functionals/prediction/prediction.py b/plantseg/functionals/prediction/prediction.py index 5b6e3377..a0f6937d 100644 --- a/plantseg/functionals/prediction/prediction.py +++ b/plantseg/functionals/prediction/prediction.py @@ -28,7 +28,7 @@ def biio_prediction( raw: np.ndarray, input_layout: ImageLayout, model_id: str, -) -> np.ndarray: +) -> dict[str, np.ndarray]: assert isinstance(input_layout, str) model = load_model_description(model_id) @@ -93,13 +93,15 @@ def biio_prediction( assert isinstance(sample_out, Sample) if len(sample_out.members) != 1: logger.warning("Model has more than one output tensor. 
PlantSeg does not support this yet.") - t = {i: o.transpose(['batch', 'channel', 'z', 'y', 'x']) for i, o in sample_out.members.items()} - pmaps = [] - for i, bczyx in t.items(): - for czyx in bczyx: - for zyx in czyx: - pmaps.append(zyx.data.to_numpy()) - return pmaps # FIXME: Wrong return type + desired_axes = [AxisId(a) for a in ['batch', 'channel', 'z', 'y', 'x']] + t = {i: o.transpose(desired_axes) for i, o in sample_out.members.items()} + named_pmaps = {} + for key, bczyx in t.items(): + bczyx = bczyx.data.to_numpy() + assert bczyx.ndim == 5, f"Expected 5D BCZYX-transposed prediction from `bioimageio.core`, got {bczyx.ndim}D" + for b, czyx in enumerate(bczyx): + named_pmaps[f'{key}_{b}'] = czyx + return named_pmaps # list of CZYX arrays def unet_prediction( @@ -121,6 +123,10 @@ def unet_prediction( This function handles both single and multi-channel outputs from the model, returning appropriately shaped arrays based on the output channel configuration. + For Bioimage.IO Model Zoo models, weights are downloaded and loaded into `UNet3D` or `UNet2D` + in `plantseg.training.model`, i.e. `bioimageio.core` is not used. `biio_prediction()` uses + `bioimageio.core` for loading and running models. + Args: raw (np.ndarray): Raw input data. Input_layout (ImageLayout): The layout of the input data. @@ -147,10 +153,7 @@ def unet_prediction( model, model_config, model_path = model_zoo.get_model_by_config_path(config_path, model_weights_path) elif model_id is not None: # BioImage.IO zoo mode logger.info("BioImage.IO prediction: Running model from BioImage.IO model zoo.") - if True: # NOTE: For now, do not use native pytorch-3dunet prediction if using BioImage.IO models - return biio_prediction(raw=raw, input_layout=input_layout, model_id=model_id) - else: - model, model_config, model_path = model_zoo.get_model_by_id(model_id) + model, model_config, model_path = model_zoo.get_model_by_id(model_id) elif model_name is not None: # PlantSeg zoo mode logger.info("Zoo prediction: Running model from PlantSeg official zoo.") model, model_config, model_path = model_zoo.get_model_by_name(model_name, model_update=model_update) diff --git a/plantseg/tasks/prediction_tasks.py b/plantseg/tasks/prediction_tasks.py index 7e9f6402..42107ae5 100644 --- a/plantseg/tasks/prediction_tasks.py +++ b/plantseg/tasks/prediction_tasks.py @@ -2,7 +2,7 @@ from plantseg.core.image import ImageLayout, PlantSegImage, SemanticType from plantseg.functionals.dataprocessing import fix_layout -from plantseg.functionals.prediction import unet_prediction +from plantseg.functionals.prediction import biio_prediction, unet_prediction from plantseg.tasks import task_tracker @@ -51,7 +51,7 @@ def unet_prediction_task( config_path=config_path, model_weights_path=model_weights_path, ) - # assert pmaps.ndim == 4, f"Expected 4D CZXY prediction, got {pmaps.ndim}D" + assert pmaps.ndim == 4, f"Expected 4D CZXY prediction, got {pmaps.ndim}D" new_images = [] @@ -68,3 +68,33 @@ def unet_prediction_task( ) return new_images + + +@task_tracker +def biio_prediction_task( + image: PlantSegImage, + model_id: str, + suffix: str = "_prediction", +) -> list[PlantSegImage]: + data = image.get_data() + input_layout = image.image_layout.value + + named_pmaps = biio_prediction( + raw=data, + input_layout=input_layout, + model_id=model_id, + ) + + new_images = [] + for name, pmap in named_pmaps.items(): + # Input layout is always ZYX this loop + pmap = fix_layout(pmap, input_layout=input_layout, output_layout='CZYX') + new_images.append( + image.derive_new( + pmap, 
+ name=f"{image.name}_{suffix}_{name}", + semantic_type=SemanticType.PREDICTION, + image_layout='CZYX', + ) + ) + return new_images diff --git a/plantseg/viewer_napari/widgets/prediction.py b/plantseg/viewer_napari/widgets/prediction.py index 3735db6b..28e5164f 100644 --- a/plantseg/viewer_napari/widgets/prediction.py +++ b/plantseg/viewer_napari/widgets/prediction.py @@ -12,7 +12,7 @@ from plantseg.core.image import PlantSegImage from plantseg.core.zoo import model_zoo -from plantseg.tasks.prediction_tasks import unet_prediction_task +from plantseg.tasks.prediction_tasks import biio_prediction_task, unet_prediction_task from plantseg.viewer_napari import log from plantseg.viewer_napari.widgets.proofreading import widget_split_and_merge_from_scribbles from plantseg.viewer_napari.widgets.segmentation import widget_agglomeration, widget_dt_ws @@ -138,35 +138,44 @@ def widget_unet_prediction( patch_halo: tuple[int, int, int] = (0, 0, 0), single_patch: bool = False, ) -> None: + ps_image = PlantSegImage.from_napari_layer(image) + widgets_to_update = [ + widget_dt_ws.image, + widget_agglomeration.image, + widget_split_and_merge_from_scribbles.image, + ] if mode is UNetPredictionMode.PLANTSEG: suffix = model_name model_id = None + return schedule_task( + unet_prediction_task, + task_kwargs={ + "image": ps_image, + "model_name": model_name, + "model_id": model_id, + "suffix": suffix, + "patch": patch_size if advanced else None, + "patch_halo": patch_halo if advanced else None, + "single_batch_mode": single_patch if advanced else False, + "device": device, + }, + widgets_to_update=widgets_to_update, + ) elif mode is UNetPredictionMode.BIOIMAGEIO: suffix = model_id model_name = None + return schedule_task( + biio_prediction_task, + task_kwargs={ + "image": ps_image, + "model_id": model_id, + "suffix": suffix, + }, + widgets_to_update=widgets_to_update, + ) else: raise NotImplementedError(f'Mode {mode} not implemented yet.') - ps_image = PlantSegImage.from_napari_layer(image) - return schedule_task( - unet_prediction_task, - task_kwargs={ - "image": ps_image, - "model_name": model_name, - "model_id": model_id, - "suffix": suffix, - "patch": patch_size if advanced else None, - "patch_halo": patch_halo if advanced else None, - "single_batch_mode": single_patch if advanced else False, - "device": device, - }, - widgets_to_update=[ - widget_dt_ws.image, - widget_agglomeration.image, - widget_split_and_merge_from_scribbles.image, - ], - ) - widget_unet_prediction.insert(3, model_filters) From f4efde4acf3dbbe2f68be126e620d51d31f735b0 Mon Sep 17 00:00:00 2001 From: Qin Yu Date: Wed, 18 Dec 2024 01:14:31 +0100 Subject: [PATCH 25/32] refactor: improve naming of bioimageio.core output --- plantseg/functionals/prediction/prediction.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/plantseg/functionals/prediction/prediction.py b/plantseg/functionals/prediction/prediction.py index a0f6937d..2a895df9 100644 --- a/plantseg/functionals/prediction/prediction.py +++ b/plantseg/functionals/prediction/prediction.py @@ -96,11 +96,14 @@ def biio_prediction( desired_axes = [AxisId(a) for a in ['batch', 'channel', 'z', 'y', 'x']] t = {i: o.transpose(desired_axes) for i, o in sample_out.members.items()} named_pmaps = {} - for key, bczyx in t.items(): - bczyx = bczyx.data.to_numpy() + for key, tensor_bczyx in t.items(): + bczyx = tensor_bczyx.data.to_numpy() assert bczyx.ndim == 5, f"Expected 5D BCZYX-transposed prediction from `bioimageio.core`, got {bczyx.ndim}D" - for b, czyx in 
enumerate(bczyx): - named_pmaps[f'{key}_{b}'] = czyx + if bczyx.shape[0] == 1: + named_pmaps[f'{key}'] = bczyx[0] + else: + for b, czyx in enumerate(bczyx): + named_pmaps[f'{key}_{b}'] = czyx return named_pmaps # list of CZYX arrays From bff023c1697488be6a28d5f2bd6ea31986fbc8eb Mon Sep 17 00:00:00 2001 From: Qin Yu Date: Wed, 18 Dec 2024 03:05:55 +0100 Subject: [PATCH 26/32] fix: prediction widget is not updated by import --- plantseg/viewer_napari/widgets/io.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/plantseg/viewer_napari/widgets/io.py b/plantseg/viewer_napari/widgets/io.py index 7a3d689d..f9bedfa9 100644 --- a/plantseg/viewer_napari/widgets/io.py +++ b/plantseg/viewer_napari/widgets/io.py @@ -14,6 +14,7 @@ from plantseg.tasks.io_tasks import export_image_task, import_image_task from plantseg.tasks.workflow_handler import workflow_handler from plantseg.viewer_napari import log +from plantseg.viewer_napari.widgets.prediction import widget_unet_prediction from plantseg.viewer_napari.widgets.utils import _return_value_if_widget, schedule_task current_dataset_keys: list[str] | None = None @@ -101,7 +102,10 @@ def widget_open_file( elif layer_type == ImageType.LABEL.value: semantic_type = SemanticType.SEGMENTATION - widgets_to_update = [widget_set_voxel_size.layer] + widgets_to_update = [ + widget_set_voxel_size.layer, + widget_unet_prediction.image, + ] return schedule_task( import_image_task, From d8eef3338ba9b187839bd603593af58868c31b92 Mon Sep 17 00:00:00 2001 From: Qin Yu Date: Wed, 18 Dec 2024 03:10:14 +0100 Subject: [PATCH 27/32] refactor: support more formats of bioimageio axes specs Now not only axes with `.id` and `.size` attributes but also string axes such as 'bcyx' coming with `shape` at the same level. These are due to the legacy of bioimage.io and complexity of versioning. Tested with `pioneering-rhino` --- plantseg/functionals/prediction/prediction.py | 91 +++++++++++-------- plantseg/tasks/prediction_tasks.py | 3 +- 2 files changed, 56 insertions(+), 38 deletions(-) diff --git a/plantseg/functionals/prediction/prediction.py b/plantseg/functionals/prediction/prediction.py index 2a895df9..afa5d6d1 100644 --- a/plantseg/functionals/prediction/prediction.py +++ b/plantseg/functionals/prediction/prediction.py @@ -50,51 +50,70 @@ def biio_prediction( dims = tuple( AxisId('channel') if item.lower() == 'c' else AxisId(item.lower()) for item in input_layout ) # `AxisId` has to be "channel" not "c" - members = { - TensorId(tensor_id): Tensor(array=raw, dims=dims).transpose( - [AxisId(a) if isinstance(a, str) else a.id for a in axes] + + if isinstance(axes[0], str): # then it's a <=0.4.10 model, `predict_sample_block` is not implemented + logger.warning( + "Model is older than 0.5.0. PlantSeg will try to run BioImage.IO core inference, but it is not supported by BioImage.IO core." ) - } - sample = Sample(members=members, stat={}, id="raw") - - for a in axes: - if isinstance(a, str): - raise ValueError(f"Model has a string axis: {a}, please report issue to PlantSeg developers.") - sizes_in_rdf = {a.id: a.size for a in axes} - assert 'x' in sizes_in_rdf, "Model does not have 'x' axis in input tensor." - size_to_check = sizes_in_rdf[AxisId('x')] - if isinstance(size_to_check, int): # e.g. 
'emotional-cricket' - # 'emotional-cricket' has {'batch': None, 'channel': 1, 'z': 100, 'y': 128, 'x': 128} - input_block_shape = { - TensorId(tensor_id): { - a.id: a.size if isinstance(a.size, int) else 1 + axis_mapping = {'b': 'batch', 'c': 'channel'} + axes = [AxisId(axis_mapping.get(a, a)) for a in list(axes)] + members = {TensorId(tensor_id): Tensor(array=raw, dims=dims).transpose([AxisId(a) for a in axes])} + sample = Sample(members=members, stat={}, id="raw") + sample_out = predict(model=model, inputs=sample) + + # If inference is supported by BioImage.IO core, this is how it should be done in PlantSeg: + # + # shape = model.inputs[0].shape + # input_block_shape = {TensorId(tensor_id): {AxisId(a): s for a, s in zip(axes, shape)}} + # sample_out = predict(model=model, inputs=sample, input_block_shape=input_block_shape) + else: + members = { + TensorId(tensor_id): Tensor(array=raw, dims=dims).transpose( + [AxisId(a) if isinstance(a, str) else a.id for a in axes] + ) + } + sample = Sample(members=members, stat={}, id="raw") + sizes_in_rdf = {a.id: a.size for a in axes} + assert 'x' in sizes_in_rdf, "Model does not have 'x' axis in input tensor." + size_to_check = sizes_in_rdf[AxisId('x')] + if isinstance(size_to_check, int): # e.g. 'emotional-cricket' + # 'emotional-cricket' has {'batch': None, 'channel': 1, 'z': 100, 'y': 128, 'x': 128} + input_block_shape = { + TensorId(tensor_id): { + a.id: a.size if isinstance(a.size, int) else 1 + for a in axes + if not isinstance(a, str) # for a.size/a.id type checking only + } + } + sample_out = predict(model=model, inputs=sample, input_block_shape=input_block_shape) + elif isinstance(size_to_check, v0_5.ParameterizedSize): # e.g. 'philosophical-panda' + # 'philosophical-panda' has: + # {'z': ParameterizedSize(min=1, step=1), + # 'channel': 2, + # 'y': ParameterizedSize(min=16, step=16), + # 'x': ParameterizedSize(min=16, step=16)} + blocksize_parameter = { + (TensorId(tensor_id), a.id): ( + (96 - a.size.min) // a.size.step if isinstance(a.size, v0_5.ParameterizedSize) else 1 + ) for a in axes if not isinstance(a, str) # for a.size/a.id type checking only } - } - sample_out = predict(model=model, inputs=sample, input_block_shape=input_block_shape) - elif isinstance(size_to_check, v0_5.ParameterizedSize): # e.g. 'philosophical-panda' - # 'philosophical-panda' has: - # {'z': ParameterizedSize(min=1, step=1), - # 'channel': 2, - # 'y': ParameterizedSize(min=16, step=16), - # 'x': ParameterizedSize(min=16, step=16)} - blocksize_parameter = { - (TensorId(tensor_id), a.id): ( - (96 - a.size.min) // a.size.step if isinstance(a.size, v0_5.ParameterizedSize) else 1 - ) - for a in axes - if not isinstance(a, str) # for a.size/a.id type checking only - } - sample_out = predict(model=model, inputs=sample, blocksize_parameter=blocksize_parameter) - else: - assert_never(size_to_check) + sample_out = predict(model=model, inputs=sample, blocksize_parameter=blocksize_parameter) + else: + assert_never(size_to_check) assert isinstance(sample_out, Sample) if len(sample_out.members) != 1: logger.warning("Model has more than one output tensor. 
PlantSeg does not support this yet.") + + desired_axes_short = [AxisId(a) for a in ['b', 'c', 'z', 'y', 'x']] desired_axes = [AxisId(a) for a in ['batch', 'channel', 'z', 'y', 'x']] - t = {i: o.transpose(desired_axes) for i, o in sample_out.members.items()} + t = { + i: o.transpose(desired_axes_short) if 'b' in o.dims or 'c' in o.dims else o.transpose(desired_axes) + for i, o in sample_out.members.items() + } + named_pmaps = {} for key, tensor_bczyx in t.items(): bczyx = tensor_bczyx.data.to_numpy() diff --git a/plantseg/tasks/prediction_tasks.py b/plantseg/tasks/prediction_tasks.py index 42107ae5..e84a7744 100644 --- a/plantseg/tasks/prediction_tasks.py +++ b/plantseg/tasks/prediction_tasks.py @@ -87,8 +87,7 @@ def biio_prediction_task( new_images = [] for name, pmap in named_pmaps.items(): - # Input layout is always ZYX this loop - pmap = fix_layout(pmap, input_layout=input_layout, output_layout='CZYX') + # Input layout is always CZYX this loop new_images.append( image.derive_new( pmap, From a01e65c10b04b11a311b37e945e644a3d5fd57ab Mon Sep 17 00:00:00 2001 From: Qin Yu Date: Wed, 18 Dec 2024 19:37:45 +0100 Subject: [PATCH 28/32] test: bioimage.io core prediction functional --- .../prediction/test_bioimageio_core_pred.py | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 tests/functionals/prediction/test_bioimageio_core_pred.py diff --git a/tests/functionals/prediction/test_bioimageio_core_pred.py b/tests/functionals/prediction/test_bioimageio_core_pred.py new file mode 100644 index 00000000..586c82df --- /dev/null +++ b/tests/functionals/prediction/test_bioimageio_core_pred.py @@ -0,0 +1,30 @@ +from pathlib import Path + +import pooch +import pytest +import skimage.transform as skt + +from plantseg.functionals.prediction.prediction import biio_prediction +from plantseg.io.io import smart_load + +CELLPOSE_TEST_IMAGE_RGB_3D = 'http://www.cellpose.org/static/data/rgb_3D.tif' +path_rgb_3d_75x2x75x75 = Path(pooch.retrieve(CELLPOSE_TEST_IMAGE_RGB_3D, known_hash=None)) +raw_zcyx_75x2x75x75 = smart_load(path_rgb_3d_75x2x75x75) +raw_zcyx_96x2x96x96 = skt.resize(raw_zcyx_75x2x75x75, (96, 2, 96, 96), order=1) +raw_cell_3d_100x128x128 = skt.resize(raw_zcyx_75x2x75x75[:, 1], (100, 128, 128), order=1) +raw_cell_2d_96x96 = raw_cell_3d_100x128x128[48] + + +@pytest.mark.parametrize( + "raw, input_layout, model_id", + ( + (raw_zcyx_96x2x96x96, 'ZCYX', 'philosophical-panda'), + (raw_cell_3d_100x128x128, 'ZYX', 'emotional-cricket'), + (raw_cell_2d_96x96, 'YX', 'pioneering-rhino'), + ), +) +def test_biio_prediction(raw, input_layout, model_id): + named_pmaps = biio_prediction(raw, input_layout, model_id) + for key, pmap in named_pmaps.items(): + assert pmap is not None, f"Prediction map for {key} is None" + assert pmap.ndim == 4, f"Prediction map for {key} has {pmap.ndim} dimensions" From cfb9c142d7b3c35bc6320dd4984bc3d8092fc7b1 Mon Sep 17 00:00:00 2001 From: Qin Yu Date: Thu, 19 Dec 2024 15:40:22 +0100 Subject: [PATCH 29/32] fix: ZCYX doesn't throw error but corrected to CZYX --- plantseg/core/image.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/plantseg/core/image.py b/plantseg/core/image.py index 4c60eab5..b93cd13d 100644 --- a/plantseg/core/image.py +++ b/plantseg/core/image.py @@ -417,7 +417,10 @@ def _check_shape(self, data: np.ndarray, properties: ImageProperties) -> tuple[n return data[:, 0], properties elif self.image_layout == ImageLayout.ZCYX: - raise ValueError(f"Image layout {self.image_layout} not supported, should have been converted to 
CZYX") + logger.warning("Image layout is ZCYX but should have been converted to CZYX. PlantSeg is doing this now.") + properties.image_layout = ImageLayout.CZYX + data = np.moveaxis(data, 0, 1) + return self._check_shape(data, properties) return data, properties From b539c62d69a52da6e75b68b44f9e2068540421bf Mon Sep 17 00:00:00 2001 From: Qin Yu Date: Thu, 19 Dec 2024 15:44:07 +0100 Subject: [PATCH 30/32] test: bioimage.io core prediction task --- tests/conftest.py | 27 +++++++++++++ .../prediction/test_bioimageio_core_pred.py | 30 -------------- .../prediction/test_prediction_biio.py | 18 +++++++++ tests/tasks/test_prediction_tasks.py | 40 +++++++++++++++++-- 4 files changed, 82 insertions(+), 33 deletions(-) delete mode 100644 tests/functionals/prediction/test_bioimageio_core_pred.py create mode 100644 tests/functionals/prediction/test_prediction_biio.py diff --git a/tests/conftest.py b/tests/conftest.py index d5076888..6567c858 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,15 +3,42 @@ import shutil from pathlib import Path +import numpy as np +import pooch import pytest +import skimage.transform as skt import torch import yaml +from plantseg.io.io import smart_load + TEST_FILES = Path(__file__).resolve().parent / "resources" VOXEL_SIZE = (0.235, 0.15, 0.15) KEY_ZARR = "volumes/new" IS_CUDA_AVAILABLE = torch.cuda.is_available() +CELLPOSE_TEST_IMAGE_RGB_3D = 'http://www.cellpose.org/static/data/rgb_3D.tif' + + +@pytest.fixture +def raw_zcyx_75x2x75x75(tmpdir) -> np.ndarray: + path_rgb_3d_75x2x75x75 = Path(pooch.retrieve(CELLPOSE_TEST_IMAGE_RGB_3D, path=tmpdir, known_hash=None)) + return smart_load(path_rgb_3d_75x2x75x75) + + +@pytest.fixture +def raw_zcyx_96x2x96x96(raw_zcyx_75x2x75x75): + return skt.resize(raw_zcyx_75x2x75x75, (96, 2, 96, 96), order=1) + + +@pytest.fixture +def raw_cell_3d_100x128x128(raw_zcyx_75x2x75x75): + return skt.resize(raw_zcyx_75x2x75x75[:, 1], (100, 128, 128), order=1) + + +@pytest.fixture +def raw_cell_2d_96x96(raw_cell_3d_100x128x128): + return raw_cell_3d_100x128x128[48] @pytest.fixture diff --git a/tests/functionals/prediction/test_bioimageio_core_pred.py b/tests/functionals/prediction/test_bioimageio_core_pred.py deleted file mode 100644 index 586c82df..00000000 --- a/tests/functionals/prediction/test_bioimageio_core_pred.py +++ /dev/null @@ -1,30 +0,0 @@ -from pathlib import Path - -import pooch -import pytest -import skimage.transform as skt - -from plantseg.functionals.prediction.prediction import biio_prediction -from plantseg.io.io import smart_load - -CELLPOSE_TEST_IMAGE_RGB_3D = 'http://www.cellpose.org/static/data/rgb_3D.tif' -path_rgb_3d_75x2x75x75 = Path(pooch.retrieve(CELLPOSE_TEST_IMAGE_RGB_3D, known_hash=None)) -raw_zcyx_75x2x75x75 = smart_load(path_rgb_3d_75x2x75x75) -raw_zcyx_96x2x96x96 = skt.resize(raw_zcyx_75x2x75x75, (96, 2, 96, 96), order=1) -raw_cell_3d_100x128x128 = skt.resize(raw_zcyx_75x2x75x75[:, 1], (100, 128, 128), order=1) -raw_cell_2d_96x96 = raw_cell_3d_100x128x128[48] - - -@pytest.mark.parametrize( - "raw, input_layout, model_id", - ( - (raw_zcyx_96x2x96x96, 'ZCYX', 'philosophical-panda'), - (raw_cell_3d_100x128x128, 'ZYX', 'emotional-cricket'), - (raw_cell_2d_96x96, 'YX', 'pioneering-rhino'), - ), -) -def test_biio_prediction(raw, input_layout, model_id): - named_pmaps = biio_prediction(raw, input_layout, model_id) - for key, pmap in named_pmaps.items(): - assert pmap is not None, f"Prediction map for {key} is None" - assert pmap.ndim == 4, f"Prediction map for {key} has {pmap.ndim} dimensions" diff --git 
a/tests/functionals/prediction/test_prediction_biio.py b/tests/functionals/prediction/test_prediction_biio.py new file mode 100644 index 00000000..7334e280 --- /dev/null +++ b/tests/functionals/prediction/test_prediction_biio.py @@ -0,0 +1,18 @@ +import pytest + +from plantseg.functionals.prediction.prediction import biio_prediction + + +@pytest.mark.parametrize( + "raw_fixture_name, input_layout, model_id", + ( + ('raw_zcyx_96x2x96x96', 'ZCYX', 'philosophical-panda'), + ('raw_cell_3d_100x128x128', 'ZYX', 'emotional-cricket'), + ('raw_cell_2d_96x96', 'YX', 'pioneering-rhino'), + ), +) +def test_biio_prediction(raw_fixture_name, input_layout, model_id, request): + named_pmaps = biio_prediction(request.getfixturevalue(raw_fixture_name), input_layout, model_id) + for key, pmap in named_pmaps.items(): + assert pmap is not None, f"Prediction map for {key} is None" + assert pmap.ndim == 4, f"Prediction map for {key} has {pmap.ndim} dimensions" diff --git a/tests/tasks/test_prediction_tasks.py b/tests/tasks/test_prediction_tasks.py index b3cad4ea..b4d8d2eb 100644 --- a/tests/tasks/test_prediction_tasks.py +++ b/tests/tasks/test_prediction_tasks.py @@ -3,7 +3,7 @@ from plantseg.core.image import ImageLayout, ImageProperties, PlantSegImage, SemanticType from plantseg.io.voxelsize import VoxelSize -from plantseg.tasks.prediction_tasks import unet_prediction_task +from plantseg.tasks.prediction_tasks import biio_prediction_task, unet_prediction_task @pytest.mark.parametrize( @@ -13,7 +13,7 @@ ((64, 64), ImageLayout.YX, 'confocal_2D_unet_ovules_ds2x'), ], ) -def test_unet_prediction(shape, layout, model_name): +def test_unet_prediction_task(shape, layout, model_name): mock_data = np.random.rand(*shape).astype('float32') property = ImageProperties( @@ -25,7 +25,12 @@ def test_unet_prediction(shape, layout, model_name): ) image = PlantSegImage(data=mock_data, properties=property) - result = unet_prediction_task(image=image, model_name=model_name, model_id=None, device='cpu') + result = unet_prediction_task( + image=image, + model_name=model_name, + model_id=None, + device='cpu', + ) assert len(result) == 1 result = result[0] @@ -34,3 +39,32 @@ def test_unet_prediction(shape, layout, model_name): assert result.image_layout == property.image_layout assert result.voxel_size == property.voxel_size assert result.shape == mock_data.shape + + +@pytest.mark.parametrize( + "raw_fixture_name, input_layout, model_id", + ( + ('raw_zcyx_96x2x96x96', 'ZCYX', 'philosophical-panda'), + ('raw_cell_3d_100x128x128', 'ZYX', 'emotional-cricket'), + ('raw_cell_2d_96x96', 'YX', 'pioneering-rhino'), + ), +) +def test_biio_prediction_task(raw_fixture_name, input_layout, model_id, request): + image = PlantSegImage( + data=request.getfixturevalue(raw_fixture_name), + properties=ImageProperties( + name='test', + voxel_size=VoxelSize(voxels_size=(1.0, 1.0, 1.0), unit='um'), + semantic_type=SemanticType.RAW, + image_layout=input_layout, + original_voxel_size=VoxelSize(voxels_size=(1.0, 1.0, 1.0), unit='um'), + ), + ) + result = biio_prediction_task( + image=image, + model_id=model_id, + suffix="_biio_prediction", + ) + for new_image in result: + assert new_image.semantic_type == SemanticType.PREDICTION + assert '_biio_prediction' in new_image.name From 38735752b55967f9fb7031d45b082849f4c6a8ab Mon Sep 17 00:00:00 2001 From: Qin Yu Date: Thu, 19 Dec 2024 17:57:12 +0100 Subject: [PATCH 31/32] fix: fix parts of #373 --- plantseg/viewer_napari/widgets/dataprocessing.py | 12 +++++++++++- plantseg/viewer_napari/widgets/segmentation.py | 12 
 2 files changed, 22 insertions(+), 2 deletions(-)

diff --git a/plantseg/viewer_napari/widgets/dataprocessing.py b/plantseg/viewer_napari/widgets/dataprocessing.py
index 473c1403..129762a8 100644
--- a/plantseg/viewer_napari/widgets/dataprocessing.py
+++ b/plantseg/viewer_napari/widgets/dataprocessing.py
@@ -129,8 +129,15 @@ def widget_cropping(
     )
 
 
+initialised_widget_cropping: bool = (
+    False  # Avoid throwing an error when the first image is loaded but its layout is not supported
+)
+
+
 @widget_cropping.image.changed.connect
 def _on_cropping_image_changed(image: Layer):
+    global initialised_widget_cropping
+
     if image is None:
         widget_cropping.crop_z.hide()
         return None
@@ -145,7 +152,10 @@ def _on_cropping_image_changed(image: Layer):
         return None
 
     if ps_image.is_multichannel:
-        raise ValueError("Multichannel images are not supported for cropping.")
+        if initialised_widget_cropping:
+            raise ValueError("Multichannel images are not supported for cropping.")
+        else:
+            initialised_widget_cropping = True
 
     widget_cropping.crop_z.show()
     image_shape_z = ps_image.shape[0]
diff --git a/plantseg/viewer_napari/widgets/segmentation.py b/plantseg/viewer_napari/widgets/segmentation.py
index a4e5ccef..5e2868ae 100644
--- a/plantseg/viewer_napari/widgets/segmentation.py
+++ b/plantseg/viewer_napari/widgets/segmentation.py
@@ -219,8 +219,15 @@ def _on_show_advanced_changed(state: bool):
             widget.hide()
 
 
+initialised_widget_dt_ws: bool = (
+    False  # Avoid throwing an error when the first image is loaded but its layout is not supported
+)
+
+
 @widget_dt_ws.image.changed.connect
 def _on_image_changed(image: Image):
+    global initialised_widget_dt_ws
+
     ps_image = PlantSegImage.from_napari_layer(image)
 
     if ps_image.image_layout == ImageLayout.ZYX:
@@ -229,4 +236,7 @@ def _on_image_changed(image: Image):
         widget_dt_ws.stacked.hide()
         widget_dt_ws.stacked.value = False
     if ps_image.image_layout != ImageLayout.YX:
-        log(f"Unsupported image layout: {ps_image.image_layout}", thread="DT Watershed", level="error")
+        if initialised_widget_dt_ws:
+            log(f"Unsupported image layout: {ps_image.image_layout}", thread="DT Watershed", level="error")
+        else:
+            initialised_widget_dt_ws = True

From fabae0a8d2cd6342be4bb20842e6e160e77feed1 Mon Sep 17 00:00:00 2001
From: Qin Yu
Date: Thu, 19 Dec 2024 17:59:18 +0100
Subject: [PATCH 32/32] feat: show both model ID and name for BioImage.IO models

---
 plantseg/core/zoo.py | 25 +++++++-------
 plantseg/viewer_napari/widgets/prediction.py | 34 ++++++++++----------
 2 files changed, 31 insertions(+), 28 deletions(-)

diff --git a/plantseg/core/zoo.py b/plantseg/core/zoo.py
index 69e3fa50..554c3588 100644
--- a/plantseg/core/zoo.py
+++ b/plantseg/core/zoo.py
@@ -457,23 +457,26 @@ def _is_plantseg_model(self, collection_entry: dict) -> bool:
         normalized_tags = ["".join(filter(str.isalnum, tag.lower())) for tag in tags]
         return 'plantseg' in normalized_tags
 
-    def get_bioimageio_zoo_plantseg_model_names(self) -> list[str]:
-        """Return a list of model names in the BioImage.IO Model Zoo tagged with 'plantseg'."""
+    def get_bioimageio_zoo_all_model_names(self) -> list[tuple[str, str]]:
+        """Return a list of (model display name, model id) tuples for all models in the BioImage.IO Model Zoo."""
         if not hasattr(self, 'models_bioimageio'):
             self.refresh_bioimageio_zoo_urls()
-        return sorted(model_zoo.models_bioimageio[model_zoo.models_bioimageio["supported"]].index.to_list())
+        id_name = self.models_bioimageio[['name_display']]
+        return sorted([(name, id) for id, name in id_name.itertuples()])
 
-    def get_bioimageio_zoo_all_model_names(self) -> list[str]:
-        """Return a list of all model names in the BioImage.IO Model Zoo."""
+    def get_bioimageio_zoo_plantseg_model_names(self) -> list[tuple[str, str]]:
+        """Return a list of (model display name, model id) tuples for BioImage.IO models tagged with 'plantseg'."""
         if not hasattr(self, 'models_bioimageio'):
             self.refresh_bioimageio_zoo_urls()
-        return sorted(model_zoo.models_bioimageio.index.to_list())
+        id_name = self.models_bioimageio[self.models_bioimageio["supported"]][['name_display']]
+        return sorted([(name, id) for id, name in id_name.itertuples()])
 
-    def get_bioimageio_zoo_other_model_names(self) -> list[str]:
-        """Return a list of model names in the BioImage.IO Model Zoo not tagged with 'plantseg'."""
-        return sorted(
-            list(set(self.get_bioimageio_zoo_all_model_names()) - set(self.get_bioimageio_zoo_plantseg_model_names()))
-        )
+    def get_bioimageio_zoo_other_model_names(self) -> list[tuple[str, str]]:
+        """Return a list of (model display name, model id) tuples for BioImage.IO models not tagged with 'plantseg'."""
+        if not hasattr(self, 'models_bioimageio'):
+            self.refresh_bioimageio_zoo_urls()
+        id_name = self.models_bioimageio[~self.models_bioimageio["supported"]][['name_display']]
+        return sorted([(name, id) for id, name in id_name.itertuples()])
 
     def _flatten_module(self, module: Module) -> list[Module]:
         """Recursively flatten a PyTorch nn.Module into a list of its elemental layers."""
diff --git a/plantseg/viewer_napari/widgets/prediction.py b/plantseg/viewer_napari/widgets/prediction.py
index 28e5164f..311dd5fa 100644
--- a/plantseg/viewer_napari/widgets/prediction.py
+++ b/plantseg/viewer_napari/widgets/prediction.py
@@ -106,6 +106,7 @@ def to_choices(cls):
         'label': 'BioImage.IO model',
         'tooltip': 'Select a model from BioImage.IO model zoo.',
         'choices': model_zoo.get_bioimageio_zoo_plantseg_model_names(),
+        'value': model_zoo.get_bioimageio_zoo_plantseg_model_names()[0][1],
     },
     advanced={
         'label': 'Show advanced parameters',
@@ -131,7 +132,7 @@ def widget_unet_prediction(
     mode: UNetPredictionMode = UNetPredictionMode.PLANTSEG,
     plantseg_filter: bool = True,
     model_name: Optional[str] = None,
-    model_id: Optional[str] = None,
+    model_id: Optional[str] = model_zoo.get_bioimageio_zoo_plantseg_model_names()[0][1],
     device: str = ALL_DEVICES[0],
     advanced: bool = False,
     patch_size: tuple[int, int, int] = (128, 128, 128),
@@ -139,14 +140,15 @@ def widget_unet_prediction(
     single_patch: bool = False,
 ) -> None:
     ps_image = PlantSegImage.from_napari_layer(image)
-    widgets_to_update = [
-        widget_dt_ws.image,
-        widget_agglomeration.image,
-        widget_split_and_merge_from_scribbles.image,
-    ]
+
     if mode is UNetPredictionMode.PLANTSEG:
         suffix = model_name
         model_id = None
+        widgets_to_update = [
+            widget_dt_ws.image,
+            widget_agglomeration.image,
+            widget_split_and_merge_from_scribbles.image,
+        ]
         return schedule_task(
             unet_prediction_task,
             task_kwargs={
@@ -164,6 +166,10 @@ def widget_unet_prediction(
     elif mode is UNetPredictionMode.BIOIMAGEIO:
         suffix = model_id
         model_name = None
+        widgets_to_update = [
+            # BioImage.IO models may output multi-channel 3D image or even multi-channel scalar in CZYX format.
+            # So PlantSeg widgets, which all take ZYX or YX, are better not to be updated.
+ ] return schedule_task( biio_prediction_task, task_kwargs={ @@ -210,17 +216,11 @@ def update_halo(): widget_unet_prediction.patch_size[0].enabled = True widget_unet_prediction.patch_halo[0].enabled = True elif widget_unet_prediction.mode.value is UNetPredictionMode.BIOIMAGEIO: - widget_unet_prediction.patch_halo.value = model_zoo.compute_3D_halo_for_bioimageio_models( - widget_unet_prediction.model_id.value + log( + 'Automatic halo not implemented for BioImage.IO models yet because they are handled by BioImage.IO Core.', + thread='BioImage.IO Core prediction', + level='info', ) - if model_zoo.is_2D_bioimageio_model(widget_unet_prediction.model_id.value): - widget_unet_prediction.patch_size[0].value = 0 - widget_unet_prediction.patch_size[0].enabled = False - widget_unet_prediction.patch_halo[0].enabled = False - else: - widget_unet_prediction.patch_size[0].value = widget_unet_prediction.patch_size[1].value - widget_unet_prediction.patch_size[0].enabled = True - widget_unet_prediction.patch_halo[0].enabled = True else: raise NotImplementedError(f'Automatic halo not implemented for {widget_unet_prediction.mode.value} mode.') @@ -270,7 +270,7 @@ def _on_widget_unet_prediction_plantseg_filter_change(plantseg_filter: bool): else: widget_unet_prediction.model_id.choices = ( model_zoo.get_bioimageio_zoo_plantseg_model_names() - + [Separator] + + [('', Separator)] # `[('', Separator)]` for list[tuple[str, str]], [Separator] for list[str] + model_zoo.get_bioimageio_zoo_other_model_names() )
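Note on [PATCH 32/32]: the refactored zoo helpers return (display name, model id) tuples, which magicgui accepts as (label, value) choices, so the dropdown lists the human-readable display name while `widget_unet_prediction.model_id.value` stays the bare BioImage.IO id (hence the `[0][1]` default and the `('', Separator)` entry). Below is a minimal, standalone sketch of that tuple-building pattern; the two-row `models_bioimageio` DataFrame and its `name_display` strings are made up for illustration, and the real table is assembled in `ModelZoo.refresh_bioimageio_zoo_urls`.

    import pandas as pd

    # Toy stand-in for ModelZoo.models_bioimageio: index = model id,
    # 'name_display' = text shown in the dropdown, 'supported' = tagged with 'plantseg'.
    models_bioimageio = pd.DataFrame(
        {
            "name_display": [
                "emotional-cricket: Cell boundary prediction ...",
                "pioneering-rhino: 2D nuclei segmentation ...",
            ],
            "supported": [True, False],
        },
        index=["emotional-cricket", "pioneering-rhino"],
    )

    # Same pattern as the patched helpers: select one column, then emit sorted (name, id) pairs.
    id_name = models_bioimageio[models_bioimageio["supported"]][["name_display"]]
    plantseg_choices = sorted([(name, model_id) for model_id, name in id_name.itertuples()])

    print(plantseg_choices[0])     # ('emotional-cricket: Cell boundary prediction ...', 'emotional-cricket')
    print(plantseg_choices[0][1])  # 'emotional-cricket' -- the value handed to the model_id widget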