diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 5ab906587..5843cae58 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -2,105 +2,109 @@ name: Test and Deploy bioimageio.spec on: push: - branches: [ main ] + branches: [main] pull_request: - branches: [ "**" ] + branches: ["**"] jobs: - black: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - name: Check files using the black formatter - uses: rickstaa/action-black@v1 - id: action_black - with: - black_args: "." - - name: Annotate diff changes using reviewdog - if: steps.action_black.outputs.is_formatted == 'true' - uses: reviewdog/action-suggester@v1 - with: - tool_name: blackfmt - test: runs-on: ubuntu-latest strategy: + fail-fast: false matrix: - python-version: [3.7, 3.8, 3.9] - + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + include: + - python-version: "3.8" + test-format: true steps: - - uses: actions/checkout@v3 - - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - cache: 'pip' - - name: Install dependencies - run: | - pip install --upgrade pip - pip install -e .[test] - - name: Test with pytest - run: pytest tests - - name: Check passthrough models - run: python scripts/generate_passthrough_modules.py check -# todo: add mypy checks for python 3.10 when we can add KW_ONLY to dataclasses -# allowing dataclass inheritance w/o the 'missing' default value -# - name: MyPy -# if: ${{ matrix.python-version == '3.10' }} -# run: | -# mkdir -p .mypy-cache -# mypy . --install-types --non-interactive --cache-dir .mypy-cache --explicit-package-bases --check-untyped-defs + - uses: actions/checkout@v4 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + cache: "pip" + - name: Install dependencies + run: | + pip install --upgrade pip + pip install -e .[dev] + - name: Get Date + id: get-date + run: | + echo "week=$(/bin/date -u "+%Y-%U")" >> $GITHUB_OUTPUT + shell: bash + - uses: actions/cache@v3 + with: + path: tests/cache + key: ${{ runner.os }}-${{ matrix.python-version }}-${{ steps.get-date.outputs.week }}-${{ hashFiles('**/lockfiles') }} + - name: Check autogenerated imports + run: python scripts/generate_version_submodule_imports.py check + - run: black . 
+ if: matrix.test-format
+ - run: ruff check **/*.py # ignore notebooks for now
+ if: matrix.test-format
+ - run: pyright --version
+ - run: pyright -p pyproject.toml
+ - run: pytest

- conda-build:
+ deploy:
 runs-on: ubuntu-latest
- needs: test
 steps:
- - name: checkout
- uses: actions/checkout@v3
- with:
- fetch-depth: 0
- - name: Install Conda environment with Micromamba
- uses: mamba-org/provision-with-micromamba@main
- with:
- environment-file: false
- environment-name: build-env
- channels: conda-forge
- extra-specs: |
- boa
- - name: linux conda build
- shell: bash -l {0}
- run: |
- conda mambabuild -c conda-forge conda-recipe
+ - uses: actions/checkout@v4
+ - uses: actions/setup-python@v4
+ with:
+ python-version: "3.12"
+ cache: "pip"
+ - name: Install dependencies
+ run: |
+ pip install --upgrade pip
+ pip install -e .[dev]
+ - name: Generate spec docs
+ run: python scripts/generate_spec_documentation.py --dist dist/user_docs
+ - name: Generate JSON schemas
+ run: python scripts/generate_json_schemas.py
+ - name: Generate developer docs
+ run: pdoc -o ./dist bioimageio.spec
+ - name: Get branch name to deploy to
+ id: get_branch
+ shell: bash
+ run: |
+ if [[ -n '${{ github.event.pull_request.head.ref }}' ]]; then branch=gh-pages-${{ github.event.pull_request.head.ref }}; else branch=gh-pages; fi
+ echo "::set-output name=branch::$branch"
+ - name: Deploy to ${{ steps.get_branch.outputs.branch }} 🚀
+ uses: JamesIves/github-pages-deploy-action@v4
+ with:
+ branch: ${{ steps.get_branch.outputs.branch }}
+ folder: dist

- deploy:
+ mamba-build:
 runs-on: ubuntu-latest
 needs: test
 steps:
- - uses: actions/checkout@v3
- - name: Set up Python 3.9
- uses: actions/setup-python@v4
- with:
- python-version: 3.9
- - name: Install dependencies
- run: |
- pip install .
- - name: Generate Docs
- run: python scripts/generate_rdf_docs.py
- - name: Generate pre-/postprocessing docs
- run: python scripts/generate_processing_docs.py
- - name: Generate weight formats docs
- run: python scripts/generate_weight_formats_docs.py
- - name: Generate JSON Schema
- run: python scripts/generate_json_specs.py
- - name: Generate weight formats overview
- run: python scripts/generate_weight_formats_overview.py generate
- - name: Get branch name to deploy to
- id: get_branch
- shell: bash
- run: |
- if [[ -n '${{ github.event.pull_request.head.ref }}' ]]; then branch=gh-pages-${{ github.event.pull_request.head.ref }}; else branch=gh-pages; fi
- echo "::set-output name=branch::$branch"
- - name: Deploy to ${{ steps.get_branch.outputs.branch }} 🚀
- uses: JamesIves/github-pages-deploy-action@v4
- with:
- branch: ${{ steps.get_branch.outputs.branch }}
- folder: dist
+ - name: checkout
+ uses: actions/checkout@v3
+ with:
+ fetch-depth: 0
+ - name: Install Conda environment with Micromamba
+ uses: mamba-org/setup-micromamba@v1
+ with:
+ cache-downloads: true
+ cache-environment: true
+ environment-name: build-env
+ condarc: |
+ channels:
+ - conda-forge
+ create-args: >-
+ boa
+ - name: linux conda build
+ run: |
+ conda mambabuild -c conda-forge conda-recipe
+ shell: bash -l {0}
+
+# rattler-build:
+# name: Build package
+# runs-on: ubuntu-latest
+# steps:
+# - uses: actions/checkout@v4
+# - name: Build conda package
+# uses: prefix-dev/rattler-build-action@v0.2.2
+# with:
+# recipe-path: conda-recipe/recipe.yaml
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index c42b66b47..74e18cbe6 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -18,7 +18,7 @@ jobs:
 - name: Set up Python
 uses: actions/setup-python@v4
 with:
- python-version: "3.8"
+ python-version: "3.9"
 cache: 'pip'
 - name: Install dependencies
 run: |
diff --git a/.gitignore b/.gitignore
index 7b814942c..feb4114a7 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,12 @@
 __pycache__/
-.vscode
-tmp
+.env
+.vscode/launch.json
 *.egg-info/
-dist
\ No newline at end of file
+build/
+dist/
+docs/
+user_docs/
+tests/cache
+tests/generated_json_schemas
+tmp/
+output/
diff --git a/.markdownlint.json b/.markdownlint.json
new file mode 100644
index 000000000..47db06ff7
--- /dev/null
+++ b/.markdownlint.json
@@ -0,0 +1,6 @@
+{
+ "default": true,
+ "MD013": false,
+ "MD033": false,
+ "MD041": false
+}
\ No newline at end of file
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index d9e991083..747905195 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,59 +1,37 @@
 repos:
 - repo: https://github.com/ambv/black
- rev: 22.6.0
+ rev: 24.2.0
 hooks:
- - id: black
+ - id: black
- - repo: https://github.com/pre-commit/mirrors-mypy
- rev: v0.991
+ - repo: https://github.com/astral-sh/ruff-pre-commit
+ rev: v0.2.1
 hooks:
- - id: mypy
- additional_dependencies: [types-requests]
- args: [--install-types, --non-interactive, --explicit-package-bases, --check-untyped-defs]
+ - id: ruff
+ args: [--fix]
 - repo: local
 hooks:
- - id: generate rdf docs
- name: generate rdf docs
- entry: python scripts/generate_rdf_docs.py
- language: system
- always_run: true
- pass_filenames: false
- - repo: local
- hooks:
- - id: generate processing docs
- name: generate processing docs
- entry: python scripts/generate_processing_docs.py
- language: system
- always_run: true
- pass_filenames: false
- - repo: local
- hooks:
- - id: generate weight formats docs
- name:
generate weight formats docs
- entry: python scripts/generate_weight_formats_docs.py
- language: system
- always_run: true
- pass_filenames: false
- - repo: local
- hooks:
- - id: generate json schemas
- name: generate json schemas
- entry: python scripts/generate_json_specs.py
- language: system
- always_run: true
- pass_filenames: false
- - repo: local
- hooks:
- - id: generate passthrough modules
- name: generate passthrough modules
- entry: python scripts/generate_passthrough_modules.py generate
- language: system
- always_run: true
- pass_filenames: false
- - repo: local
- hooks:
- - id: generate weight formats overview
- name: generate weight formats overview
- entry: python scripts/generate_weight_formats_overview.py generate
- language: system
- always_run: true
- pass_filenames: false
+ - id: pyright
+ name: pyright
+ entry: pyright
+ language: system
+ always_run: true
+ pass_filenames: true
+ files: ^.*\.py$
+ - id: generate passthrough modules
+ name: generate passthrough modules
+ entry: python scripts/generate_version_submodule_imports.py generate
+ language: system
+ always_run: true
+ pass_filenames: false
+ - id: generate json schemas
+ name: generate json schemas
+ entry: python scripts/generate_json_schemas.py
+ language: system
+ always_run: true
+ pass_filenames: false
+ - id: generate spec md docs
+ name: generate spec md docs
+ entry: python scripts/generate_spec_documentation.py
+ language: system
+ always_run: true
+ pass_filenames: false
diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 000000000..eaa98b68e
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,19 @@
+{
+ "window.title": "bioimageio.spec",
+ "python.testing.unittestArgs": [
+ "-v",
+ "-s",
+ "./tests",
+ "-p",
+ "test_*.py"
+ ],
+ "python.testing.pytestEnabled": true,
+ "python.testing.unittestEnabled": false,
+ "yaml.schemas": {
+ "dist/bioimageio_schema_latest.json": [
+ "rdf.yaml",
+ "bioimageio.yaml",
+ "*.bioimageio.yaml"
+ ],
+ }
+}
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 000000000..2d01953ae
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,6 @@
+Thank you for considering contributing to bioimageio.spec!
+
+Find good first issues [here](https://github.com/bioimage-io/spec-bioimage-io/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22).
+Open or join a discussion [here](https://github.com/bioimage-io/spec-bioimage-io/discussions).
+
+More refined instructions on how to contribute will follow.
diff --git a/MANIFEST.in b/MANIFEST.in
index 33eb32981..f820beb4f 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,4 +1,5 @@
 include bioimageio/spec/VERSION
 include README.md
 include LICENSE
-include bioimageio/spec/static/licenses.json
+include bioimageio/spec/static/spdx_licenses.json
+include bioimageio/spec/static/tag_categories.json
diff --git a/README.md b/README.md
index 0ef613103..8faef8a12 100644
--- a/README.md
+++ b/README.md
@@ -1,345 +1,394 @@
 ![License](https://img.shields.io/github/license/bioimage-io/spec-bioimage-io.svg)
![PyPI](https://img.shields.io/pypi/v/bioimageio-spec.svg?style=popout)
![conda-version](https://anaconda.org/conda-forge/bioimageio.spec/badges/version.svg)
-# Specifications for BioImage.IO
-This repository contains specifications defined by the BioImage.IO community. These specifications are used for defining fields in YAML files which we called `Resource Description Files` or `RDF`.
The RDFs can be downloaded or uploaded to the [bioimage.io website](https://bioimage.io), produced or consumed by BioImage.IO-compatible consumers(e.g. image analysis software or other website). Currently we defined two types of RDFs: a dedicated RDF specification for AI models (i.e. `model RDF`) and a general RDF specification. The model RDF is a RDF with additional fields that specifically designed for describing AI models.
+# Specifications for bioimage.io
+This repository contains specifications defined by the bioimage.io community. These specifications are used for defining fields in YAML 1.2 files which should be named `rdf.yaml`. Such an rdf.yaml --- along with files referenced in it --- can be downloaded from or uploaded to the [bioimage.io website](https://bioimage.io) and may be produced or consumed by bioimage.io-compatible consumers (e.g. image analysis software like ilastik).
-All the BioImage.IO-compatible RDF must fulfill the following rules:
- * Must be a YAML file encoded as UTF-8; If yaml syntax version is not specified to be 1.1 in the first line by `% YAML 1.1` it must be equivalent in yaml 1.1 and yaml 1.2. For differences see https://yaml.readthedocs.io/en/latest/pyyaml.html#differences-with-pyyaml.
- * The RDF file extension must be `.yaml` (not `.yml`)
- * The RDF file can be saved in a folder (or virtual folder) or in a zip package, the following additional rules must apply:
- 1. When stored in a local file system folder, github repo, zenodo deposition, blob storage virtual folder or similar kind, the RDF file name should match the pattern of `*.rdf.yaml`, for example `my-model.rdf.yaml`.
- 2. When the RDF file and other files are zipped into a RDF package, it must be named as `rdf.yaml`.
+bioimage.io-compatible resources must fulfill the following rules:
-As a general guideline, please follow the model RDF spec to describe AI models and use the general RDF spec for other resource types including `dataset`, `application`. You will find more details about these two specifications in the following sections. Please also note that the best way to check whether your RDF file is BioImage.IO-compliant is to run the BioImage.IO Validator against it.
+Note that the Python package PyYAML does not support YAML 1.2.
+We therefore use and recommend [ruyaml](https://ruyaml.readthedocs.io/en/latest/).
+For differences see <https://yaml.readthedocs.io/en/latest/pyyaml.html#differences-with-pyyaml>.
-## Resource Description File Specification
+Please also note that the best way to check whether your `rdf.yaml` file is bioimage.io-compliant is to call `bioimageio.core.validate` from the [bioimageio.core](https://github.com/bioimage-io/core-bioimage-io-python) Python package.
+The [bioimageio.core](https://github.com/bioimage-io/core-bioimage-io-python) Python package also provides the bioimageio command line interface (CLI) with the `validate` command:
-A BioImage.IO-compatible Resource Description File (RDF) is a YAML file with a set of specifically defined fields.
+```terminal
+bioimageio validate path/to/your/rdf.yaml
+```
+
+## Format version overview
-You can find detailed field definitions here:
- - [general RDF spec (latest)](https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/rdf_spec_latest.md)
- - [general RDF spec (0.2.x)](https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/rdf_spec_0_2.md)
+All bioimage.io description formats are defined as [Pydantic models](https://docs.pydantic.dev/latest/).
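A minimal sketch of what loading a description through these Pydantic models can look like (the names below are re-exported in the new `bioimageio/spec/__init__.py` further down in this diff; the exact call signature and result attributes are assumptions, and `path/to/rdf.yaml` is a placeholder):

```python
# Hedged sketch, not the definitive API: load an rdf.yaml and inspect the result.
from bioimageio.spec import InvalidDescr, load_description

rd = load_description("path/to/rdf.yaml")  # placeholder path
if isinstance(rd, InvalidDescr):
    # invalid descriptions are returned (not raised) and carry their summary
    print(rd.validation_summary)
else:
    print(f"loaded a valid '{rd.type}' description")
```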
-
The specifications are also available as json schemas:
 - [general RDF spec (0.2.x, json schema)](https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/rdf_spec_0_2.json)
+| type | format version | documentation |
+| --- | --- | --- |
+| model | 0.5<br>0.4 | [model_spec_v0-5.md](https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/model_spec_v0-5.md)<br>[model_spec_v0-4.md](https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/model_spec_v0-4.md) |
+| dataset | 0.3<br>0.2 | [dataset_spec_v0-3.md](https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/dataset_spec_v0-3.md)<br>[dataset_spec_v0-2.md](https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/dataset_spec_v0-2.md) |
+| notebook | 0.3<br>0.2 | [notebook_spec_v0-3.md](https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/notebook_spec_v0-3.md)<br>[notebook_spec_v0-2.md](https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/notebook_spec_v0-2.md) |
+| application | 0.3<br>0.2 | [application_spec_v0-3.md](https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/application_spec_v0-3.md)<br>[application_spec_v0-2.md](https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/application_spec_v0-2.md) |
+| collection | 0.3<br>0.2 | [collection_spec_v0-3.md](https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/collection_spec_v0-3.md)<br>[collection_spec_v0-2.md](https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/collection_spec_v0-2.md) |
+| generic | 0.3<br>0.2 | [generic_spec_v0-3.md](https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/generic_spec_v0-3.md)<br>[generic_spec_v0-2.md](https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/generic_spec_v0-2.md) |
-[Here](https://github.com/bioimage-io/spec-bioimage-io/blob/main/example_specs/rdf-examples.md) you can find some examples for using RDF to describe applications, notebooks, datasets etc.
+## JSON schema
-## Model Resource Description File Specification
+Simplified descriptions are available as [JSON schema](https://json-schema.org/):
-Besides the general RDF spec, the `Model Resource Description File Specification`(`model RDF`) defines a file format for representing pretrained AI models in [YAML format](https://en.wikipedia.org/wiki/YAML). This format is used to describe models hosted on the [BioImage.IO](https://bioimage.io) model repository site.
+| bioimageio.spec version | JSON schema |
+| --- | --- |
+| latest | [bioimageio_schema_latest.json](https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/bioimageio_schema_latest.json) |
+| 0.5 | [bioimageio_schema_v0-5.json](https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/bioimageio_schema_v0-5.json) |
-You can find the latest `model RDF` here:
- - [model RDF spec (latest)](https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/model_spec_latest.md)
+These are primarily intended for syntax highlighting and form generation.
-Here is a list of model RDF Examples:
- - [Model RDF Examples](https://github.com/bioimage-io/spec-bioimage-io/tree/main/example_specs/models).
+## Examples
+We provide some [examples for using rdf.yaml files to describe models, applications, notebooks and datasets](https://github.com/bioimage-io/spec-bioimage-io/blob/main/example_specs/examples.md).
-## Collection Resource Description File Specification
+## 💁 Recommendations
-The [`Collection Resource Description File Specification`](https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/collection_spec_latest.md)(`collection RDF`) defines a file format for representing collections of resources for the [BioImage.IO](https://bioimage.io) website.
+* Due to the limitations of storage services such as Zenodo, which does not support subfolders, it is recommended to place other files in the same directory level of the `rdf.yaml` file and try to avoid using subdirectories.
+* Use the [bioimageio.core Python package](https://github.com/bioimage-io/core-bioimage-io-python) to validate your `rdf.yaml` file.
+* bioimageio.spec keeps evolving. Try to use and upgrade to the most current format version!
-You can find the latest `collection RDF` here:
- - [collection RDF spec (latest)](https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/collection_spec_latest.md)
+## ⌨ bioimageio command-line interface (CLI)
+The bioimageio CLI has moved entirely to [bioimageio.core](https://github.com/bioimage-io/core-bioimage-io-python).
-## Linking resource items
+## 🖥 Installation
-You can create links to connect resource items by adding another resource item id to the `links` field.
For example, if you want to associate an application with a model, you can set the links field of the models like the following:
-```yaml
-application:
- - id: HPA-Classification
- source: https://raw.githubusercontent.com/bioimage-io/tfjs-bioimage-io/master/apps/HPA-Classification.imjoy.html
+bioimageio.spec can be installed with either `conda` or `pip`, we recommend installing `bioimageio.core` instead:
-model:
- - id: HPAShuffleNetV2
- source: https://raw.githubusercontent.com/bioimage-io/tfjs-bioimage-io/master/models/HPAShuffleNetV2/HPAShuffleNetV2.model.yaml
- links:
- - HPA-Classification
+```console
+conda install -c conda-forge bioimageio.core
```
-## Hosting RDFs
+or
-You can host the resource description file on one of the public git repository website, including Zenodo Github, Gitlab, Bitbucket, or Gist.
-In order to make it available in https://bioimage.io, you can submit the RDF package via the uploader.
+```console
+pip install -U bioimageio.core
+```
+## 🤝 How to contribute
-## Recommendations
+## ♥ Contributors
- * For AI models, consider using the model-specific spec (i.e. [model RDF](https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/model_spec_latest.md)) instead of the general RDF. Only fallback to the general RDF if writing model specific RDF is not possible for some reason.
- * The RDF or package file name should not contain spaces or special characters, it should be concise, descriptive, in kebab case or camel case.
- * Due to the limitations of storage services such as Zenodo, which does not support subfolders, it is recommended to place other files in the same directory level of the RDF file and try to avoid using subdirectories.
- * Use the [bioimage.io spec validator](#bioimageio-spec-validator) to verify your YAML file
- * Store the yaml file in a version controlled Git repository (e.g. Github or Gitlab)
- * Use or upgrade to the latest format version
-
+
+ bioimageio.spec contributors
+
-# bioimageio command-line interface (CLI)
-The BioImage.IO command line tool makes it easy to work with BioImage.IO RDFs.
-A basic version of it, documented here, is provided by the [bioimageio.spec package](bioimageio-python-package), which is extended by the [bioimageio.core](https://github.com/bioimage-io/core-bioimage-io-python) package.
+Made with [contrib.rocks](https://contrib.rocks).
+## Δ Changelog
-## validate
+### bioimageio.spec Python package
-It is recommended to use this validator to verify your models when you write it manually or develop tools for generating RDF files.
+#### bioimageio.spec 0.5.0
+* new description formats: [generic 0.3, application 0.3, collection 0.3, dataset 0.3, notebook 0.3](#generic-030--application-030--collection-030--dataset-030--notebook-030) and [model 0.5](#model-050).
+* various API changes, most important functions:
+ * `bioimageio.spec.load_description` (replaces `load_raw_resource_description`, interface changed)
+ * `bioimageio.spec.validate_format` (new)
+ * `bioimageio.spec.dump_description` (replaces `serialize_raw_resource_description_to_dict`, interface changed)
+ * `bioimageio.spec.update_format` (interface changed)
+* switch from Marshmallow to Pydantic
+ * extended validation
+ * one joint, more precise JSON schema
-Use the `validate` command to check for formatting errors like missing or invalid values:
-```
-bioimageio validate
-```
+#### bioimageio.spec 0.4.9
-`` may be a local RDF yaml "`/rdf.yaml`" or a DOI / URL to a zenodo record, or a URL to an rdf.yaml file.
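To make the API changes listed under 0.5.0 concrete, here is a hedged sketch of `validate_format` (the function and its `format_version` keyword appear in the new `bioimageio/spec/_description.py` below; the example content is invented and far from a complete rdf.yaml):

```python
# Sketch: check a raw RDF dict against the latest known format.
from bioimageio.spec import validate_format

content = {
    "type": "dataset",          # hypothetical minimal content
    "format_version": "0.3.0",
    "name": "my dataset",
}
summary = validate_format(content, format_version="latest")
print(summary)  # a bioimageio.spec.ValidationSummary with errors/warnings
```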
+
* small bugfixes
+* better type hints
+* improved tests

-To see if your model is compatible to the [latest bioimage.io model format](https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/model_spec_latest.md) use the spec validator with the `--update-format` flag:
-```
-bioimageio validate --update-format ``
-```

+#### bioimageio.spec 0.4.8post1

-The output of the `validate` command will indicate missing or invalid fields in the model file. For example, if the field `timestamp` was missing it would print the following:
-```
-{'timestamp': ['Missing data for required field.']}
-```
-or if the field `test_inputs` does not contain a list, it would print:
-```
-{'test_inputs': ['Not a valid list.']}.
-```

+* add `axes` and `eps` to `scale_mean_var`

-## update-format
-Similar to the `validate` command with `--update-format` flag the `update-format` command attempts to convert an RDF
-to the latest applicable format version, but saves the result in a file for further manual editing:
-```
-bioimageio update-format
-```

+#### bioimageio.spec 0.4.7post1

-# bioimageio.spec Python package
-The bioimageio.spec package allows to work with BioImage.IO RDFs within Python.
-The commands on which the bioimageio CLI is based can be used as functions.
-Additionally, IO functions are provided to work with BioImage.IO RDFs as 'raw nodes' Python objects, e.g. the raw representation of a model RDF 0.4 at [bioimageio.spec.model.v0_4.raw_nodes](bioimageio/spec/model/v0_4/raw_nodes.py#L122-L140).
-The [bioimageio.core](https://github.com/bioimage-io/core-bioimage-io-python) package extends this 'raw' representation for more convenience.

+* add simple forward compatibility by treating future format versions as latest known (for the respective resource type)

[//]: # (TODO: documentation for bioimageio.spec)

+#### bioimageio.spec 0.4.6post3
+* Make CLI output more readable

-## installation
-bioimageio.spec can be installed with either `pip` or `conda`:

+* find redirected URLs when checking for URL availability

-```
-# pip
-pip install -U bioimageio.spec

+#### bioimageio.spec 0.4.6post2

-# conda
-conda install -c conda-forge bioimageio.spec
-```

+* Improve error message for non-existing RDF file path given as string

-As a dependency it is included in [bioimageio.core](https://github.com/bioimage-io/core-bioimage-io-python#installation) library, which extends bioimageio.spec with more powerful commands like 'predict'.

+* Improve documentation for model description's `documentation` field

-## Environment variables

+#### bioimageio.spec 0.4.6post1

-| Name | Default | Description |
-|---|---|---|
-| BIOIMAGEIO_USE_CACHE | "true" | Enables simple URL to file cache. possible, case-insensitive, positive values are: "true", "yes", "1". Any other value is interpreted as "false" |
-| BIOIMAGEIO_CACHE_PATH | generated tmp folder | File path for simple URL to file cache; changes of URL source are not detected. |
-| BIOIMAGEIO_CACHE_WARNINGS_LIMIT | "3" | Maximum number of warnings generated for simple cache hits.
|

+* fix enrich_partial_rdf_with_imjoy_plugin (see <https://github.com/bioimage-io/spec-bioimage-io/pull/452>)

-## Changelog
+#### bioimageio.spec 0.4.5post16

-#### bioimageio.spec 0.4.9
-- small bugixes
-- better type hints
-- improved tests

+* fix rdf_update of entries in `resolve_collection_entries()`

-#### bioimageio.spec 0.4.8post1
-- add `axes` and `eps` to `scale_mean_var`

+#### bioimageio.spec 0.4.5post15

-#### bioimageio.spec 0.4.7post1
-- add simple forward compatibility by treating future format versions as latest known (for the respective resource type)

+* pass root to `enrich_partial_rdf` arg of `resolve_collection_entries()`

-#### bioimageio.spec 0.4.6post3
-- Make CLI output more readable
-- find redirected URLs when checking for URL availability

+#### bioimageio.spec 0.4.5post14

-#### bioimageio.spec 0.4.6post2
-- Improve error message for non-existing RDF file path given as string
-- Improve documentation for model RDF's `documentation` field

+* keep `ResourceDescription.root_path` as URI for remote resources. This fixes the collection description as the collection entries are resolved after the collection description has been loaded.

-#### bioimageio.spec 0.4.6post1
-- fix enrich_partial_rdf_with_imjoy_plugin (see https://github.com/bioimage-io/spec-bioimage-io/pull/452)

+#### bioimageio.spec 0.4.5post13

-#### bioimageio.spec 0.4.5post16
-- fix rdf_update of entries in `resolve_collection_entries()`

+* new bioimageio.spec.partner module adding validate-partner-collection command if optional 'lxml' dependency is available

-#### bioimageio.spec 0.4.5post15
-- pass root to `enrich_partial_rdf` arg of `resolve_collection_entries()`

+#### bioimageio.spec 0.4.5post12

-#### bioimageio.spec 0.4.5post14
-- keep `ResourceDescrption.root_path` as URI for remote resources. This fixes the collection RDF as the collection entries are resolved after the collection RDF has been loaded.

+* new env var `BIOIMAGEIO_CACHE_WARNINGS_LIMIT` (default: 3) to avoid spam from cache hit warnings
+
+* more robust conversion of ImportableSourceFile for absolute paths to relative paths (don't fail on non-path source file)

-#### bioimageio.spec 0.4.5post13
-- new bioimageio.spec.partner module adding validate-partner-collection command if optional 'lxml' dependency is available

-#### bioimageio.spec 0.4.5post12
-- new env var `BIOIMAGEIO_CACHE_WARNINGS_LIMIT` (default: 3) to avoid spam from cache hit warnings
-- more robust conversion of ImportableSourceFile for absolute paths to relative paths (don't fail on non-path source file)
-
 #### bioimageio.spec 0.4.5post11
-- resolve symlinks when transforming absolute to relative paths during serialization; see [#438](https://github.com/bioimage-io/spec-bioimage-io/pull/438)
+
+* resolve symlinks when transforming absolute to relative paths during serialization; see [#438](https://github.com/bioimage-io/spec-bioimage-io/pull/438)

 #### bioimageio.spec 0.4.5post10
-- fix loading of collection RDF with id (id used to be ignored)
+
+* fix loading of collection description with id (id used to be ignored)

 #### bioimageio.spec 0.4.5post9
-- support loading bioimageio resources by their animal nickname (currently only models have nicknames).
+
+* support loading bioimageio resources by their animal nickname (currently only models have nicknames).

 #### bioimageio.spec 0.4.5post8
-- any field previously expecting a local relative path is now also accepting an absolute path
-- load_raw_resource_description returns a raw resource description which has no relative paths (any relative paths are converted to absolute paths).
+ +* any field previously expecting a local relative path is now also accepting an absolute path + +* load_raw_resource_description returns a raw resource description which has no relative paths (any relative paths are converted to absolute paths). #### bioimageio.spec 0.4.4post7 -- add command `commands.update_rdf()`/`update-rdf`(cli) + +* add command `commands.update_rdf()`/`update-rdf`(cli) #### bioimageio.spec 0.4.4post2 -- fix unresolved ImportableSourceFile + +* fix unresolved ImportableSourceFile #### bioimageio.spec 0.4.4post1 -- fix collection RDF conversion for type field + +* fix collection description conversion for type field #### bioimageio.spec 0.4.3post1 -- fix to shape validation for model RDF 0.4: output shape now needs to be bigger than halo -- moved objects from bioimageio.spec.shared.utils to bioimageio.spec.shared\[.node_transformer\] -- additional keys to validation summary: bioimageio_spec_version, status + +* fix to shape validation for model description 0.4: output shape now needs to be bigger than halo + +* moved objects from bioimageio.spec.shared.utils to bioimageio.spec.shared\[.node_transformer\] +* additional keys to validation summary: bioimageio_spec_version, status #### bioimageio.spec 0.4.2post4 -- fixes to general RDF: - - ignore value of field `root_path` if present in yaml. This field is used internally and always present in RDF nodes. + +* fixes to generic description: + * ignore value of field `root_path` if present in yaml. This field is used internally and always present in RDF nodes. #### bioimageio.spec 0.4.1.post5 -- fixes to collection RDF: - - RDFs specified directly in collection RDF are validated correctly even if their source field does not point to an RDF. - - nesting of collection RDF allowed + +* fixes to collection description: + * RDFs specified directly in collection description are validated correctly even if their source field does not point to an RDF. 
+ * nesting of collection description allowed #### bioimageio.spec 0.4.1.post4 -- fixed missing field `icon` in general RDF's raw node -- fixes to collection RDF: - - RDFs specified directly in collection RDF are validated correctly - - no nesting of collection RDF allowed for now - - `links` is no longer an explicit collection entry field ("moved" to unknown) + +* fixed missing field `icon` in generic description's raw node + +* fixes to collection description: + * RDFs specified directly in collection description are validated correctly + * no nesting of collection description allowed for now + * `links` is no longer an explicit collection entry field ("moved" to unknown) #### bioimageio.spec 0.4.1.post0 -- new model spec 0.3.5 and 0.4.1 + +* new model spec 0.3.5 and 0.4.1 #### bioimageio.spec 0.4.0.post3 -- `load_raw_resource_description` no longer accepts `update_to_current_format` kwarg (use `update_to_format` instead) + +* `load_raw_resource_description` no longer accepts `update_to_current_format` kwarg (use `update_to_format` instead) #### bioimageio.spec 0.4.0.post2 -- `load_raw_resource_description` accepts `update_to_format` kwarg +* `load_raw_resource_description` accepts `update_to_format` kwarg -### RDF Format Versions -#### bioimageio.spec 0.4.9 -- Non-breaking changes - - make pre-/postprocessing kwargs `mode` and `axes` always optional for model RDF 0.3 and 0.4 - -#### model RDF 0.4.8 -- Non-breaking changes - - `cite` field is now optional - -#### RDF 0.2.2 and model RDF 0.4.7 -- Breaking changes that are fully auto-convertible - - name field may not include '/' or '\' (conversion removes these) - -#### model RDF 0.4.6 -- Non-breaking changes - - Implicit output shape can be expanded by inserting `null` into `shape:scale` and indicating length of new dimension D in the `offset` field. Keep in mind that `D=2*'offset'`. - -#### model RDF 0.4.5 -- Breaking changes that are fully auto-convertible - - `parent` field changed to hold a string that is a BioImage.IO ID, a URL or a local relative path (and not subfields `uri` and `sha256`) - -#### model RDF 0.4.4 -- Non-breaking changes - - new optional field `training_data` - -#### dataset RDF 0.2.2 -- Non-breaking changes - - explicitly define and document dataset RDF (for now, clone of general RDF with type="dataset") - -#### model RDF 0.4.3 -- Non-breaking changes - - add optional field `download_url` - - add optional field `dependencies` to all weight formats (not only pytorch_state_dict) - - add optional `pytorch_version` to the pytorch_state_dict and torchscript weight formats - -#### model RDF 0.4.2 -- Bug fixes: - - in a `pytorch_state_dict` weight entry `architecture` is no longer optional. 
-
-#### collection RDF 0.2.2
-- Non-breaking changes
- - make `authors`, `cite`, `documentation` and `tags` optional
-- Breaking changes that are fully auto-convertible
- - Simplifies collection RDF 0.2.1 by merging resource type fields together to a `collection` field,
+### Resource Description Format Versions
+
+#### generic 0.3.0 / application 0.3.0 / collection 0.3.0 / dataset 0.3.0 / notebook 0.3.0
+
+* Breaking changes that are fully auto-convertible
+ * dropped `download_url`
+ * dropped non-file attachments
+ * `attachments.files` moved to `attachments.i.source`
+* Non-breaking changes
+ * added optional `parent` field
+
+#### model 0.5.0
+
+all generic 0.3.0 changes (except models already have the `parent` field) plus:
+
+* Breaking changes that are partially auto-convertible
+ * `inputs.i.axes` are now defined in more detail (same for `outputs.i.axes`)
+ * `inputs.i.shape` moved per axes to `inputs.i.axes.size` (same for `outputs.i.shape`)
+ * new pre-/postprocessing 'fixed_zero_mean_unit_variance' separated from 'zero_mean_unit_variance', where `mode=fixed` is no longer valid.
+ (for scalar values this is auto-convertible.)
+* Breaking changes that are fully auto-convertible
+ * changes in `weights.pytorch_state_dict.architecture`
+ * renamed `weights.pytorch_state_dict.architecture.source_file` to `...architecture.source`
+ * changes in `weights.pytorch_state_dict.dependencies`
+ * only conda environment allowed and specified by `weights.pytorch_state_dict.dependencies.source`
+ * new optional field `weights.pytorch_state_dict.dependencies.sha256`
+ * changes in `weights.tensorflow_model_bundle.dependencies`
+ * same as changes in `weights.pytorch_state_dict.dependencies`
+ * moved `test_inputs` to `inputs.i.test_tensor`
+ * moved `test_outputs` to `outputs.i.test_tensor`
+ * moved `sample_inputs` to `inputs.i.sample_tensor`
+ * moved `sample_outputs` to `outputs.i.sample_tensor`
+ * renamed `inputs.i.name` to `inputs.i.id`
+ * renamed `outputs.i.name` to `outputs.i.id`
+ * renamed `inputs.i.preprocessing.name` to `inputs.i.preprocessing.id`
+ * renamed `outputs.i.postprocessing.name` to `outputs.i.postprocessing.id`
+* Non-breaking changes:
+ * new pre-/postprocessing: `id`='ensure_dtype' with kwarg `dtype`
+
+#### generic 0.2.4 and model 0.4.10
+
+* Breaking changes that are fully auto-convertible
+ * `id` overwritten with value from `config.bioimageio.nickname` if available
+* Non-breaking changes
+ * `version_number` is a new, optional field indicating that an RDF is the nth published version with a given `id`
+ * `id_emoji` is a new, optional field (set from `config.bioimageio.nickname_icon` if available)
+ * `uploader` is a new, optional field with `email` and an optional `name` subfields
+
+#### model 0.4.9
+
+* Non-breaking changes
+ * make pre-/postprocessing kwargs `mode` and `axes` always optional for model description 0.3 and 0.4
+
+#### model 0.4.8
+
+* Non-breaking changes
+ * `cite` field is now optional
+
+#### generic 0.2.2 and model 0.4.7
+
+* Breaking changes that are fully auto-convertible
+ * name field may not include '/' or '\' (conversion removes these)
+
+#### model 0.4.6
+
+* Non-breaking changes
+ * Implicit output shape can be expanded by inserting `null` into `shape:scale` and indicating length of new dimension D in the `offset` field. Keep in mind that `D=2*'offset'`.
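Two of the auto-convertible renames from the model 0.5.0 entry above, shown as a before/after pair (the values are invented and the exact sub-structure of `test_tensor` in 0.5 is an assumption; only the field moves named in the changelog are taken from this diff):

```python
# model 0.4: tensor 'name' plus a separate top-level 'test_inputs' list
model_v0_4_fragment = {
    "inputs": [{"name": "raw", "axes": "bcyx"}],
    "test_inputs": ["test_input.npy"],
}
# model 0.5: 'name' becomes 'id' and the test tensor moves into the input entry
model_v0_5_fragment = {
    "inputs": [
        {
            "id": "raw",  # renamed from 'name'
            "test_tensor": {"source": "test_input.npy"},  # moved from 'test_inputs'
        }
    ],
}
```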
+ +#### model 0.4.5 + +* Breaking changes that are fully auto-convertible + * `parent` field changed to hold a string that is a bioimage.io ID, a URL or a local relative path (and not subfields `uri` and `sha256`) + +#### model 0.4.4 + +* Non-breaking changes + * new optional field `training_data` + +#### dataset 0.2.2 + +* Non-breaking changes + * explicitly define and document dataset description (for now, clone of generic description with type="dataset") + +#### model 0.4.3 + +* Non-breaking changes + * add optional field `download_url` + * add optional field `dependencies` to all weight formats (not only pytorch_state_dict) + * add optional `pytorch_version` to the pytorch_state_dict and torchscript weight formats + +#### model 0.4.2 + +* Bug fixes: + * in a `pytorch_state_dict` weight entry `architecture` is no longer optional. + +#### collection 0.2.2 + +* Non-breaking changes + * make `authors`, `cite`, `documentation` and `tags` optional + +* Breaking changes that are fully auto-convertible + * Simplifies collection description 0.2.1 by merging resource type fields together to a `collection` field, holindg a list of all resources in the specified collection. -#### (general) RDF 0.2.2 / model RDF 0.3.6 / model RDF 0.4.2 -- Non-breaking changes - - `rdf_source` new optional field - - `id` new optional field +#### generic 0.2.2 / model 0.3.6 / model 0.4.2 -#### collection RDF 0.2.1 -- First official release, extends general RDF with fields `application`, `model`, `dataset`, `notebook` and (nested) +* Non-breaking changes + * `rdf_source` new optional field + * `id` new optional field + +#### collection 0.2.1 + +* First official release, extends generic description with fields `application`, `model`, `dataset`, `notebook` and (nested) `collection`, which hold lists linking to respective resources. -#### (general) RDF 0.2.1 -- Non-breaking changes - - add optional `email` and `github_user` fields to entries in `authors` - - add optional `maintainers` field (entries like in `authors` but `github_user` is required (and `name` is not)) - -#### model RDF 0.4.1 -- Breaking changes that are fully auto-convertible - - moved field `dependencies` to `weights:pytorch_state_dict:dependencies` -- Non-breaking changes - - `documentation` field accepts URLs as well - -#### model RDF 0.3.5 -- Non-breaking changes - - `documentation` field accepts URLs as well - -#### model RDF 0.4.0 -- Breaking changes - - model inputs and outputs may not use duplicated names. - - model field `sha256` is required if `pytorch_state_dict` weights are defined. +#### generic 0.2.1 + +* Non-breaking changes + * add optional `email` and `github_user` fields to entries in `authors` + * add optional `maintainers` field (entries like in `authors` but `github_user` is required (and `name` is not)) + +#### model 0.4.1 + +* Breaking changes that are fully auto-convertible + * moved field `dependencies` to `weights:pytorch_state_dict:dependencies` + +* Non-breaking changes + * `documentation` field accepts URLs as well + +#### model 0.3.5 + +* Non-breaking changes + * `documentation` field accepts URLs as well + +#### model 0.4.0 + +* Breaking changes + * model inputs and outputs may not use duplicated names. + * model field `sha256` is required if `pytorch_state_dict` weights are defined. and is now moved to the `pytroch_state_dict` entry as `architecture_sha256`. -- Breaking changes that are fully auto-convertible - - model fields language and framework are removed. 
- - model field `source` is renamed `architecture` and is moved together with `kwargs` to the `pytorch_state_dict` + +* Breaking changes that are fully auto-convertible + * model fields language and framework are removed. + * model field `source` is renamed `architecture` and is moved together with `kwargs` to the `pytorch_state_dict` weights entry (if it exists, otherwise they are removed). - - the weight format `pytorch_script` was renamed to `torchscript`. -- Other changes - - model inputs (like outputs) may be defined by `scale`ing and `offset`ing a `reference_tensor` - - a `maintainers` field was added to the model RDF. - - the entries in the `authors` field may now additionally contain `email` or `github_user`. - - the summary returned by the `validate` command now also contains a list of warnings. - - an `update_format` command was added to aid with updating older RDFs by applying auto-conversion. - -#### model RDF 0.3.4 -- Non-breaking changes - - Add optional parameter `eps` to `scale_range` postprocessing. - -#### model RDF 0.3.3 -- Breaking changes that are fully auto-convertible - - `reference_input` for implicit output tensor shape was renamed to `reference_tensor` - -#### model RDF 0.3.2 -- Breaking changes - - The RDF file name in a package should be `rdf.yaml` for all the RDF (not `model.yaml`); - - Change `authors` and `packaged_by` fields from List[str] to List[Author] with Author consisting of a dictionary `{name: '', affiliation: '', orcid: 'optional orcid id'}`; - - Add a mandatory `type` field to comply with the general RDF. Only valid value is 'model' for model RDF; - - Only allow `license` identifier from the [SPDX license list](https://spdx.org/licenses/); -- Other changes - - Add optional `version` field (default 0.1.0) to keep track of model changes; - - Allow the values in the `attachments` list to be any values besides URI; - + * the weight format `pytorch_script` was renamed to `torchscript`. +* Other changes + * model inputs (like outputs) may be defined by `scale`ing and `offset`ing a `reference_tensor` + * a `maintainers` field was added to the model description. + * the entries in the `authors` field may now additionally contain `email` or `github_user`. + * the summary returned by the `validate` command now also contains a list of warnings. + * an `update_format` command was added to aid with updating older RDFs by applying auto-conversion. + +#### model 0.3.4 + +* Non-breaking changes + * Add optional parameter `eps` to `scale_range` postprocessing. + +#### model 0.3.3 + +* Breaking changes that are fully auto-convertible + * `reference_input` for implicit output tensor shape was renamed to `reference_tensor` + +#### model 0.3.2 + +* Breaking changes + * The RDF file name in a package should be `rdf.yaml` for all the RDF (not `model.yaml`); + * Change `authors` and `packaged_by` fields from List[str] to List[Author] with Author consisting of a dictionary `{name: '', affiliation: '', orcid: 'optional orcid id'}`; + * Add a mandatory `type` field to comply with the generic description. 
Only valid value is 'model' for model description; + * Only allow `license` identifier from the [SPDX license list](https://spdx.org/licenses/); + +* Non-breaking changes + * Add optional `version` field (default 0.1.0) to keep track of model changes; + * Allow the values in the `attachments` list to be any values besides URI; diff --git a/bioimageio/spec/VERSION b/bioimageio/spec/VERSION index d2b507965..8fd887d4d 100644 --- a/bioimageio/spec/VERSION +++ b/bioimageio/spec/VERSION @@ -1,3 +1,3 @@ { - "version": "0.4.9post5" + "version": "0.5.0" } diff --git a/bioimageio/spec/__init__.py b/bioimageio/spec/__init__.py index 1387a5d6b..6760cfc37 100644 --- a/bioimageio/spec/__init__.py +++ b/bioimageio/spec/__init__.py @@ -1,9 +1,45 @@ -from . import collection, model, rdf, shared -from .commands import update_format, update_rdf, validate -from .io_ import ( - get_resource_package_content, - load_raw_resource_description, - serialize_raw_resource_description, - serialize_raw_resource_description_to_dict, +# """ +# .. include:: ../../README.md +# """ + +from . import application as application +from . import collection as collection +from . import dataset as dataset +from . import generic as generic +from . import model as model +from ._description import LatestResourceDescr as LatestResourceDescr +from ._description import ResourceDescr as ResourceDescr +from ._description import SpecificResourceDescr as SpecificResourceDescr +from ._description import build_description as build_description +from ._description import dump_description as dump_description +from ._description import validate_format as validate_format +from ._internal.common_nodes import InvalidDescr as InvalidDescr +from ._internal.constants import VERSION +from ._internal.validation_context import ValidationContext as ValidationContext +from ._io import load_description as load_description +from ._io import ( + load_description_and_validate_format_only as load_description_and_validate_format_only, ) -from .v import __version__ +from ._io import save_bioimageio_yaml_only as save_bioimageio_yaml_only +from ._package import save_bioimageio_package as save_bioimageio_package +from ._package import ( + save_bioimageio_package_as_folder as save_bioimageio_package_as_folder, +) +from .application import AnyApplicationDescr as AnyApplicationDescr +from .application import ApplicationDescr as ApplicationDescr +from .collection import AnyCollectionDescr as AnyCollectionDescr +from .collection import CollectionDescr as CollectionDescr +from .dataset import AnyDatasetDescr as AnyDatasetDescr +from .dataset import DatasetDescr as DatasetDescr +from .generic import AnyGenericDescr as AnyGenericDescr +from .generic import GenericDescr as GenericDescr +from .model import AnyModelDescr as AnyModelDescr +from .model import ModelDescr as ModelDescr +from .notebook import AnyNotebookDescr as AnyNotebookDescr +from .notebook import NotebookDescr as NotebookDescr +from .pretty_validation_errors import ( + enable_pretty_validation_errors_in_ipynb as enable_pretty_validation_errors_in_ipynb, +) +from .summary import ValidationSummary as ValidationSummary + +__version__ = VERSION diff --git a/bioimageio/spec/__main__.py b/bioimageio/spec/__main__.py deleted file mode 100644 index 95fbc8802..000000000 --- a/bioimageio/spec/__main__.py +++ /dev/null @@ -1,157 +0,0 @@ -import sys -from pathlib import Path -from pprint import pprint -from typing import Any, Callable, Dict, Optional, Union - -import typer - -from bioimageio.spec import __version__, 
collection, commands, model, rdf -from bioimageio.spec.shared.raw_nodes import URI - -enrich_partial_rdf_with_imjoy_plugin: Optional[Callable[[Dict[str, Any], Union[URI, Path]], Dict[str, Any]]] -try: - from bioimageio.spec.partner.utils import enrich_partial_rdf_with_imjoy_plugin -except ImportError: - enrich_partial_rdf_with_imjoy_plugin = None - partner_help = "" -else: - partner_help = f"\n+\nbioimageio.spec.partner {__version__}\nimplementing:\n\tpartner collection RDF {collection.format_version}" - -help_version = ( - f"bioimageio.spec {__version__}" - "\nimplementing:" - f"\n\tcollection RDF {collection.format_version}" - f"\n\tgeneral RDF {rdf.format_version}" - f"\n\tmodel RDF {model.format_version}" + partner_help -) - -# prevent rewrapping with \b\n: https://click.palletsprojects.com/en/7.x/documentation/#preventing-rewrapping -app = typer.Typer( - help="\b\n" + help_version, - context_settings={"help_option_names": ["-h", "--help", "--version"]}, # make --version display help with version -) # https://typer.tiangolo.com/ - - -@app.command() -def validate( - rdf_source: str = typer.Argument(..., help="RDF source as relative file path or URI"), - update_format: bool = typer.Option( - False, - help="Update format version to the latest (might fail even if source adheres to an old format version). " - "To inform the format update the source may specify fields of future versions in " - "config:future:.", # todo: add future documentation - ), - update_format_inner: bool = typer.Option( - None, help="For collection RDFs only. Defaults to value of 'update-format'." - ), - verbose: bool = typer.Option(False, help="show traceback of unexpected (no ValidationError) exceptions"), -): - summary = commands.validate(rdf_source, update_format, update_format_inner) - if summary["error"] is not None: - print(f"Error in {summary['name']}:") - pprint(summary["error"]) - if verbose: - print("traceback:") - pprint(summary["traceback"]) - ret_code = 1 - else: - print(f"No validation errors for {summary['name']}") - ret_code = 0 - - if summary["warnings"]: - print(f"Validation Warnings for {summary['name']}:") - pprint(summary["warnings"]) - - sys.exit(ret_code) - - -validate.__doc__ = commands.validate.__doc__ - - -if enrich_partial_rdf_with_imjoy_plugin is not None: - - @app.command() - def validate_partner_collection( - rdf_source: str = typer.Argument(..., help="RDF source as relative file path or URI"), - update_format: bool = typer.Option( - False, - help="Update format version to the latest (might fail even if source adheres to an old format version). " - "To inform the format update the source may specify fields of future versions in " - "config:future:.", # todo: add future documentation - ), - update_format_inner: bool = typer.Option( - None, help="For collection RDFs only. Defaults to value of 'update-format'." 
- ), - verbose: bool = typer.Option(False, help="show traceback of unexpected (no ValidationError) exceptions"), - ): - assert enrich_partial_rdf_with_imjoy_plugin is not None - summary = commands.validate( - rdf_source, update_format, update_format_inner, enrich_partial_rdf=enrich_partial_rdf_with_imjoy_plugin - ) - if summary["error"] is not None: - print(f"Error in {summary['name']}:") - pprint(summary["error"]) - if verbose: - print("traceback:") - pprint(summary["traceback"]) - ret_code = 1 - else: - print(f"No validation errors for {summary['name']}") - ret_code = 0 - - if summary["warnings"]: - print(f"Validation Warnings for {summary['name']}:") - pprint(summary["warnings"]) - - sys.exit(ret_code) - - cmd_doc = commands.validate.__doc__ - assert cmd_doc is not None - validate_partner_collection.__doc__ = ( - "A special version of the bioimageio validate command that enriches the RDFs defined in collections by parsing any " - "associated imjoy plugins. This is implemented using the 'enrich_partial_rdf' of the regular validate command:\n" - + cmd_doc - ) - - -@app.command() -def update_format( - rdf_source: str = typer.Argument(..., help="RDF source as relative file path or URI"), - path: str = typer.Argument(..., help="Path to save the RDF converted to the latest format"), -): - try: - commands.update_format(rdf_source, path) - ret_code = 0 - except Exception as e: - print(f"update-format failed with {e}") - ret_code = 1 - sys.exit(ret_code) - - -update_format.__doc__ = commands.update_format.__doc__ - - -@app.command() -def update_rdf( - source: str = typer.Argument(..., help="relative file path or URI to RDF source"), - update: str = typer.Argument(..., help="relative file path or URI to (partial) RDF as update"), - output: Path = typer.Argument(..., help="Path to save the updated RDF to"), - validate: bool = typer.Option(True, help="Whether or not to validate the updated RDF"), -): - """Update a given RDF with a (partial) RDF-like update""" - try: - commands.update_rdf(source, update, output, validate) - ret_code = 0 - except Exception as e: - print(f"update-rdf failed with {e}") - ret_code = 1 - sys.exit(ret_code) - - -@app.callback() -def callback(): - typer.echo(help_version) - - -if __name__ == "__main__": - app() diff --git a/bioimageio/spec/_build_description.py b/bioimageio/spec/_build_description.py new file mode 100644 index 000000000..81d1214eb --- /dev/null +++ b/bioimageio/spec/_build_description.py @@ -0,0 +1,69 @@ +from typing import Any, Callable, Mapping, Optional, Type, TypeVar, Union + +from ._internal.common_nodes import InvalidDescr, ResourceDescrBase +from ._internal.io import BioimageioYamlContent +from ._internal.types import FormatVersionPlaceholder +from ._internal.validation_context import ValidationContext, validation_context_var +from .generic import GenericDescr + +ResourceDescrT = TypeVar("ResourceDescrT", bound=ResourceDescrBase) + + +DISCOVER: FormatVersionPlaceholder = "discover" +"""placeholder for whatever format version an RDF specifies""" + + +def get_rd_class_impl( + typ: Any, + format_version: Any, + descriptions_map: Mapping[ + Optional[str], Mapping[Optional[str], Type[ResourceDescrT]] + ], +) -> Type[ResourceDescrT]: + assert None in descriptions_map + assert all(None in version_map for version_map in descriptions_map.values()) + assert all( + fv is None or fv.count(".") == 1 + for version_map in descriptions_map.values() + for fv in version_map + ) + if not isinstance(typ, str) or typ not in descriptions_map: + typ = None + + if (ndots 
:= format_version.count(".")) == 0: + use_format_version = format_version + ".0" + elif ndots == 2: + use_format_version = format_version[: format_version.rfind(".")] + else: + use_format_version = None + + descr_versions = descriptions_map[typ] + return descr_versions.get(use_format_version, descr_versions[None]) + + +def build_description_impl( + content: BioimageioYamlContent, + /, + *, + context: Optional[ValidationContext] = None, + format_version: Union[FormatVersionPlaceholder, str] = DISCOVER, + get_rd_class: Callable[[Any, Any], Type[ResourceDescrT]], +) -> Union[ResourceDescrT, InvalidDescr]: + context = context or validation_context_var.get() + if not isinstance(content, dict): + # "Invalid content of type '{type(content)}'" + rd_class = GenericDescr + + typ = content.get("type") + rd_class = get_rd_class(typ, content.get("format_version")) + + rd = rd_class.load(content, context=context) + assert rd.validation_summary is not None + if format_version != DISCOVER and not isinstance(rd, InvalidDescr): + discover_details = rd.validation_summary.details + as_rd_class = get_rd_class(typ, format_version) + rd = as_rd_class.load(content, context=context) + assert rd.validation_summary is not None + rd.validation_summary.details[:0] = discover_details + + return rd diff --git a/bioimageio/spec/_description.py b/bioimageio/spec/_description.py new file mode 100644 index 000000000..2cd208fd1 --- /dev/null +++ b/bioimageio/spec/_description.py @@ -0,0 +1,164 @@ +from types import MappingProxyType +from typing import Any, Literal, Optional, TypeVar, Union + +from pydantic import Discriminator +from typing_extensions import Annotated + +from ._build_description import DISCOVER, build_description_impl, get_rd_class_impl +from ._internal.common_nodes import InvalidDescr +from ._internal.io import BioimageioYamlContent +from ._internal.types import FormatVersionPlaceholder +from ._internal.validation_context import ValidationContext, validation_context_var +from .application import AnyApplicationDescr, ApplicationDescr +from .application.v0_2 import ApplicationDescr as ApplicationDescr02 +from .application.v0_3 import ApplicationDescr as ApplicationDescr03 +from .collection import AnyCollectionDescr, CollectionDescr +from .collection.v0_2 import CollectionDescr as CollectionDescr02 +from .collection.v0_3 import CollectionDescr as CollectionDescr03 +from .dataset import AnyDatasetDescr, DatasetDescr +from .dataset.v0_2 import DatasetDescr as DatasetDescr02 +from .dataset.v0_3 import DatasetDescr as DatasetDescr03 +from .generic import AnyGenericDescr, GenericDescr +from .generic.v0_2 import GenericDescr as GenericDescr02 +from .generic.v0_3 import GenericDescr as GenericDescr03 +from .model import AnyModelDescr, ModelDescr +from .model.v0_4 import ModelDescr as ModelDescr04 +from .model.v0_5 import ModelDescr as ModelDescr05 +from .notebook import AnyNotebookDescr, NotebookDescr +from .notebook.v0_2 import NotebookDescr as NotebookDescr02 +from .notebook.v0_3 import NotebookDescr as NotebookDescr03 +from .summary import ValidationSummary + +LATEST: FormatVersionPlaceholder = "latest" +"""placeholder for the latest available format version""" + + +LatestResourceDescr = Union[ + Annotated[ + Union[ + ApplicationDescr, + CollectionDescr, + DatasetDescr, + ModelDescr, + NotebookDescr, + ], + Discriminator("type"), + ], + GenericDescr, +] +"""A resource description following the latest specification format""" + + +SpecificResourceDescr = Annotated[ + Union[ + AnyApplicationDescr, + 
AnyCollectionDescr, + AnyDatasetDescr, + AnyModelDescr, + AnyNotebookDescr, + ], + Discriminator("type"), +] +"""Any of the implemented, non-generic resource descriptions""" + +ResourceDescr = Union[SpecificResourceDescr, AnyGenericDescr] +"""Any of the implemented resource descriptions""" + + +def dump_description( + rd: Union[ResourceDescr, InvalidDescr], exclude_unset: bool = True +) -> BioimageioYamlContent: + """Converts a resource to a dictionary containing only simple types that can directly be serialzed to YAML.""" + return rd.model_dump(mode="json", exclude_unset=exclude_unset) + + +RD = TypeVar("RD", bound=ResourceDescr) + + +DESCRIPTIONS_MAP = MappingProxyType( + { + None: MappingProxyType( + { + "0.2": GenericDescr02, + "0.3": GenericDescr03, + None: GenericDescr, + } + ), + "generic": MappingProxyType( + { + "0.2": GenericDescr02, + "0.3": GenericDescr03, + None: GenericDescr, + } + ), + "application": MappingProxyType( + { + "0.2": ApplicationDescr02, + "0.3": ApplicationDescr03, + None: ApplicationDescr, + } + ), + "collection": MappingProxyType( + { + "0.2": CollectionDescr02, + "0.3": CollectionDescr03, + None: CollectionDescr, + } + ), + "dataset": MappingProxyType( + { + "0.2": DatasetDescr02, + "0.3": DatasetDescr03, + None: DatasetDescr, + } + ), + "notebook": MappingProxyType( + { + "0.2": NotebookDescr02, + "0.3": NotebookDescr03, + None: NotebookDescr, + } + ), + "model": MappingProxyType( + { + "0.3": ModelDescr04, + "0.4": ModelDescr04, + "0.5": ModelDescr05, + None: ModelDescr, + } + ), + } +) + + +def _get_rd_class(typ: Any, format_version: Any): + return get_rd_class_impl(typ, format_version, DESCRIPTIONS_MAP) + + +def build_description( + content: BioimageioYamlContent, + /, + *, + context: Optional[ValidationContext] = None, + format_version: Union[FormatVersionPlaceholder, str] = DISCOVER, +) -> Union[ResourceDescr, InvalidDescr]: + return build_description_impl( + content, + context=context, + format_version=format_version, + get_rd_class=_get_rd_class, + ) + + +def validate_format( + data: BioimageioYamlContent, + /, + *, + format_version: Union[Literal["discover", "latest"], str] = DISCOVER, + context: Optional[ValidationContext] = None, +) -> ValidationSummary: + with context or validation_context_var.get(): + rd = build_description(data, format_version=format_version) + + assert rd.validation_summary is not None + return rd.validation_summary diff --git a/bioimageio/spec/_internal/__init__.py b/bioimageio/spec/_internal/__init__.py new file mode 100644 index 000000000..6a9a2c07e --- /dev/null +++ b/bioimageio/spec/_internal/__init__.py @@ -0,0 +1 @@ +from ._settings import settings as settings diff --git a/bioimageio/spec/_internal/_generated_spdx_license_literals.py b/bioimageio/spec/_internal/_generated_spdx_license_literals.py new file mode 100644 index 000000000..af85b6282 --- /dev/null +++ b/bioimageio/spec/_internal/_generated_spdx_license_literals.py @@ -0,0 +1,565 @@ +# This file was generated by scripts/update_spdx_licenses.py +from typing import Literal + +LicenseId = Literal[ + "0BSD", + "AAL", + "Abstyles", + "AdaCore-doc", + "Adobe-2006", + "Adobe-Glyph", + "ADSL", + "AFL-1.1", + "AFL-1.2", + "AFL-2.0", + "AFL-2.1", + "AFL-3.0", + "Afmparse", + "AGPL-1.0-only", + "AGPL-1.0-or-later", + "AGPL-3.0-only", + "AGPL-3.0-or-later", + "Aladdin", + "AMDPLPA", + "AML", + "AMPAS", + "ANTLR-PD", + "ANTLR-PD-fallback", + "Apache-1.0", + "Apache-1.1", + "Apache-2.0", + "APAFML", + "APL-1.0", + "App-s2p", + "APSL-1.0", + "APSL-1.1", + "APSL-1.2", + 
"APSL-2.0", + "Arphic-1999", + "Artistic-1.0", + "Artistic-1.0-cl8", + "Artistic-1.0-Perl", + "Artistic-2.0", + "ASWF-Digital-Assets-1.0", + "ASWF-Digital-Assets-1.1", + "Baekmuk", + "Bahyph", + "Barr", + "Beerware", + "Bitstream-Charter", + "Bitstream-Vera", + "BitTorrent-1.0", + "BitTorrent-1.1", + "blessing", + "BlueOak-1.0.0", + "Boehm-GC", + "Borceux", + "Brian-Gladman-3-Clause", + "BSD-1-Clause", + "BSD-2-Clause", + "BSD-2-Clause-Patent", + "BSD-2-Clause-Views", + "BSD-3-Clause", + "BSD-3-Clause-Attribution", + "BSD-3-Clause-Clear", + "BSD-3-Clause-LBNL", + "BSD-3-Clause-Modification", + "BSD-3-Clause-No-Military-License", + "BSD-3-Clause-No-Nuclear-License", + "BSD-3-Clause-No-Nuclear-License-2014", + "BSD-3-Clause-No-Nuclear-Warranty", + "BSD-3-Clause-Open-MPI", + "BSD-4-Clause", + "BSD-4-Clause-Shortened", + "BSD-4-Clause-UC", + "BSD-4.3RENO", + "BSD-4.3TAHOE", + "BSD-Advertising-Acknowledgement", + "BSD-Attribution-HPND-disclaimer", + "BSD-Protection", + "BSD-Source-Code", + "BSL-1.0", + "BUSL-1.1", + "bzip2-1.0.6", + "C-UDA-1.0", + "CAL-1.0", + "CAL-1.0-Combined-Work-Exception", + "Caldera", + "CATOSL-1.1", + "CC-BY-1.0", + "CC-BY-2.0", + "CC-BY-2.5", + "CC-BY-2.5-AU", + "CC-BY-3.0", + "CC-BY-3.0-AT", + "CC-BY-3.0-DE", + "CC-BY-3.0-IGO", + "CC-BY-3.0-NL", + "CC-BY-3.0-US", + "CC-BY-4.0", + "CC-BY-NC-1.0", + "CC-BY-NC-2.0", + "CC-BY-NC-2.5", + "CC-BY-NC-3.0", + "CC-BY-NC-3.0-DE", + "CC-BY-NC-4.0", + "CC-BY-NC-ND-1.0", + "CC-BY-NC-ND-2.0", + "CC-BY-NC-ND-2.5", + "CC-BY-NC-ND-3.0", + "CC-BY-NC-ND-3.0-DE", + "CC-BY-NC-ND-3.0-IGO", + "CC-BY-NC-ND-4.0", + "CC-BY-NC-SA-1.0", + "CC-BY-NC-SA-2.0", + "CC-BY-NC-SA-2.0-DE", + "CC-BY-NC-SA-2.0-FR", + "CC-BY-NC-SA-2.0-UK", + "CC-BY-NC-SA-2.5", + "CC-BY-NC-SA-3.0", + "CC-BY-NC-SA-3.0-DE", + "CC-BY-NC-SA-3.0-IGO", + "CC-BY-NC-SA-4.0", + "CC-BY-ND-1.0", + "CC-BY-ND-2.0", + "CC-BY-ND-2.5", + "CC-BY-ND-3.0", + "CC-BY-ND-3.0-DE", + "CC-BY-ND-4.0", + "CC-BY-SA-1.0", + "CC-BY-SA-2.0", + "CC-BY-SA-2.0-UK", + "CC-BY-SA-2.1-JP", + "CC-BY-SA-2.5", + "CC-BY-SA-3.0", + "CC-BY-SA-3.0-AT", + "CC-BY-SA-3.0-DE", + "CC-BY-SA-3.0-IGO", + "CC-BY-SA-4.0", + "CC-PDDC", + "CC0-1.0", + "CDDL-1.0", + "CDDL-1.1", + "CDL-1.0", + "CDLA-Permissive-1.0", + "CDLA-Permissive-2.0", + "CDLA-Sharing-1.0", + "CECILL-1.0", + "CECILL-1.1", + "CECILL-2.0", + "CECILL-2.1", + "CECILL-B", + "CECILL-C", + "CERN-OHL-1.1", + "CERN-OHL-1.2", + "CERN-OHL-P-2.0", + "CERN-OHL-S-2.0", + "CERN-OHL-W-2.0", + "CFITSIO", + "checkmk", + "ClArtistic", + "Clips", + "CMU-Mach", + "CNRI-Jython", + "CNRI-Python", + "CNRI-Python-GPL-Compatible", + "COIL-1.0", + "Community-Spec-1.0", + "Condor-1.1", + "copyleft-next-0.3.0", + "copyleft-next-0.3.1", + "Cornell-Lossless-JPEG", + "CPAL-1.0", + "CPL-1.0", + "CPOL-1.02", + "Crossword", + "CrystalStacker", + "CUA-OPL-1.0", + "Cube", + "curl", + "D-FSL-1.0", + "diffmark", + "DL-DE-BY-2.0", + "DOC", + "Dotseqn", + "DRL-1.0", + "DSDP", + "dtoa", + "dvipdfm", + "ECL-1.0", + "ECL-2.0", + "EFL-1.0", + "EFL-2.0", + "eGenix", + "Elastic-2.0", + "Entessa", + "EPICS", + "EPL-1.0", + "EPL-2.0", + "ErlPL-1.1", + "etalab-2.0", + "EUDatagrid", + "EUPL-1.0", + "EUPL-1.1", + "EUPL-1.2", + "Eurosym", + "Fair", + "FDK-AAC", + "Frameworx-1.0", + "FreeBSD-DOC", + "FreeImage", + "FSFAP", + "FSFUL", + "FSFULLR", + "FSFULLRWD", + "FTL", + "GD", + "GFDL-1.1-invariants-only", + "GFDL-1.1-invariants-or-later", + "GFDL-1.1-no-invariants-only", + "GFDL-1.1-no-invariants-or-later", + "GFDL-1.1-only", + "GFDL-1.1-or-later", + "GFDL-1.2-invariants-only", + "GFDL-1.2-invariants-or-later", + 
"GFDL-1.2-no-invariants-only", + "GFDL-1.2-no-invariants-or-later", + "GFDL-1.2-only", + "GFDL-1.2-or-later", + "GFDL-1.3-invariants-only", + "GFDL-1.3-invariants-or-later", + "GFDL-1.3-no-invariants-only", + "GFDL-1.3-no-invariants-or-later", + "GFDL-1.3-only", + "GFDL-1.3-or-later", + "Giftware", + "GL2PS", + "Glide", + "Glulxe", + "GLWTPL", + "gnuplot", + "GPL-1.0-only", + "GPL-1.0-or-later", + "GPL-2.0-only", + "GPL-2.0-or-later", + "GPL-3.0-only", + "GPL-3.0-or-later", + "Graphics-Gems", + "gSOAP-1.3b", + "HaskellReport", + "Hippocratic-2.1", + "HP-1986", + "HPND", + "HPND-export-US", + "HPND-Markus-Kuhn", + "HPND-sell-variant", + "HPND-sell-variant-MIT-disclaimer", + "HTMLTIDY", + "IBM-pibs", + "ICU", + "IEC-Code-Components-EULA", + "IJG", + "IJG-short", + "ImageMagick", + "iMatix", + "Imlib2", + "Info-ZIP", + "Inner-Net-2.0", + "Intel", + "Intel-ACPI", + "Interbase-1.0", + "IPA", + "IPL-1.0", + "ISC", + "Jam", + "JasPer-2.0", + "JPL-image", + "JPNIC", + "JSON", + "Kazlib", + "Knuth-CTAN", + "LAL-1.2", + "LAL-1.3", + "Latex2e", + "Latex2e-translated-notice", + "Leptonica", + "LGPL-2.0-only", + "LGPL-2.0-or-later", + "LGPL-2.1-only", + "LGPL-2.1-or-later", + "LGPL-3.0-only", + "LGPL-3.0-or-later", + "LGPLLR", + "Libpng", + "libpng-2.0", + "libselinux-1.0", + "libtiff", + "libutil-David-Nugent", + "LiLiQ-P-1.1", + "LiLiQ-R-1.1", + "LiLiQ-Rplus-1.1", + "Linux-man-pages-1-para", + "Linux-man-pages-copyleft", + "Linux-man-pages-copyleft-2-para", + "Linux-man-pages-copyleft-var", + "Linux-OpenIB", + "LOOP", + "LPL-1.0", + "LPL-1.02", + "LPPL-1.0", + "LPPL-1.1", + "LPPL-1.2", + "LPPL-1.3a", + "LPPL-1.3c", + "LZMA-SDK-9.11-to-9.20", + "LZMA-SDK-9.22", + "MakeIndex", + "Martin-Birgmeier", + "metamail", + "Minpack", + "MirOS", + "MIT", + "MIT-0", + "MIT-advertising", + "MIT-CMU", + "MIT-enna", + "MIT-feh", + "MIT-Festival", + "MIT-Modern-Variant", + "MIT-open-group", + "MIT-Wu", + "MITNFA", + "Motosoto", + "mpi-permissive", + "mpich2", + "MPL-1.0", + "MPL-1.1", + "MPL-2.0", + "MPL-2.0-no-copyleft-exception", + "mplus", + "MS-LPL", + "MS-PL", + "MS-RL", + "MTLL", + "MulanPSL-1.0", + "MulanPSL-2.0", + "Multics", + "Mup", + "NAIST-2003", + "NASA-1.3", + "Naumen", + "NBPL-1.0", + "NCGL-UK-2.0", + "NCSA", + "Net-SNMP", + "NetCDF", + "Newsletr", + "NGPL", + "NICTA-1.0", + "NIST-PD", + "NIST-PD-fallback", + "NIST-Software", + "NLOD-1.0", + "NLOD-2.0", + "NLPL", + "Nokia", + "NOSL", + "Noweb", + "NPL-1.0", + "NPL-1.1", + "NPOSL-3.0", + "NRL", + "NTP", + "NTP-0", + "O-UDA-1.0", + "OCCT-PL", + "OCLC-2.0", + "ODbL-1.0", + "ODC-By-1.0", + "OFFIS", + "OFL-1.0", + "OFL-1.0-no-RFN", + "OFL-1.0-RFN", + "OFL-1.1", + "OFL-1.1-no-RFN", + "OFL-1.1-RFN", + "OGC-1.0", + "OGDL-Taiwan-1.0", + "OGL-Canada-2.0", + "OGL-UK-1.0", + "OGL-UK-2.0", + "OGL-UK-3.0", + "OGTSL", + "OLDAP-1.1", + "OLDAP-1.2", + "OLDAP-1.3", + "OLDAP-1.4", + "OLDAP-2.0", + "OLDAP-2.0.1", + "OLDAP-2.1", + "OLDAP-2.2", + "OLDAP-2.2.1", + "OLDAP-2.2.2", + "OLDAP-2.3", + "OLDAP-2.4", + "OLDAP-2.5", + "OLDAP-2.6", + "OLDAP-2.7", + "OLDAP-2.8", + "OLFL-1.3", + "OML", + "OpenPBS-2.3", + "OpenSSL", + "OPL-1.0", + "OPL-UK-3.0", + "OPUBL-1.0", + "OSET-PL-2.1", + "OSL-1.0", + "OSL-1.1", + "OSL-2.0", + "OSL-2.1", + "OSL-3.0", + "Parity-6.0.0", + "Parity-7.0.0", + "PDDL-1.0", + "PHP-3.0", + "PHP-3.01", + "Plexus", + "PolyForm-Noncommercial-1.0.0", + "PolyForm-Small-Business-1.0.0", + "PostgreSQL", + "PSF-2.0", + "psfrag", + "psutils", + "Python-2.0", + "Python-2.0.1", + "Qhull", + "QPL-1.0", + "QPL-1.0-INRIA-2004", + "Rdisc", + "RHeCos-1.1", + "RPL-1.1", + 
"RPL-1.5", + "RPSL-1.0", + "RSA-MD", + "RSCPL", + "Ruby", + "SAX-PD", + "Saxpath", + "SCEA", + "SchemeReport", + "Sendmail", + "Sendmail-8.23", + "SGI-B-1.0", + "SGI-B-1.1", + "SGI-B-2.0", + "SGP4", + "SHL-0.5", + "SHL-0.51", + "SimPL-2.0", + "SISSL", + "SISSL-1.2", + "Sleepycat", + "SMLNJ", + "SMPPL", + "SNIA", + "snprintf", + "Spencer-86", + "Spencer-94", + "Spencer-99", + "SPL-1.0", + "SSH-OpenSSH", + "SSH-short", + "SSPL-1.0", + "SugarCRM-1.1.3", + "SunPro", + "SWL", + "Symlinks", + "TAPR-OHL-1.0", + "TCL", + "TCP-wrappers", + "TermReadKey", + "TMate", + "TORQUE-1.1", + "TOSL", + "TPDL", + "TPL-1.0", + "TTWL", + "TU-Berlin-1.0", + "TU-Berlin-2.0", + "UCAR", + "UCL-1.0", + "Unicode-DFS-2015", + "Unicode-DFS-2016", + "Unicode-TOU", + "UnixCrypt", + "Unlicense", + "UPL-1.0", + "Vim", + "VOSTROM", + "VSL-1.0", + "W3C", + "W3C-19980720", + "W3C-20150513", + "w3m", + "Watcom-1.0", + "Widget-Workshop", + "Wsuipa", + "WTFPL", + "X11", + "X11-distribute-modifications-variant", + "Xdebug-1.03", + "Xerox", + "Xfig", + "XFree86-1.1", + "xinetd", + "xlock", + "Xnet", + "xpp", + "XSkat", + "YPL-1.0", + "YPL-1.1", + "Zed", + "Zend-2.0", + "Zimbra-1.3", + "Zimbra-1.4", + "Zlib", + "zlib-acknowledgement", + "ZPL-1.1", + "ZPL-2.0", + "ZPL-2.1", +] + +DeprecatedLicenseId = Literal[ + "AGPL-1.0", + "AGPL-3.0", + "BSD-2-Clause-FreeBSD", + "BSD-2-Clause-NetBSD", + "bzip2-1.0.5", + "eCos-2.0", + "GFDL-1.1", + "GFDL-1.2", + "GFDL-1.3", + "GPL-1.0", + "GPL-1.0+", + "GPL-2.0", + "GPL-2.0+", + "GPL-2.0-with-autoconf-exception", + "GPL-2.0-with-bison-exception", + "GPL-2.0-with-classpath-exception", + "GPL-2.0-with-font-exception", + "GPL-2.0-with-GCC-exception", + "GPL-3.0", + "GPL-3.0+", + "GPL-3.0-with-autoconf-exception", + "GPL-3.0-with-GCC-exception", + "LGPL-2.0", + "LGPL-2.0+", + "LGPL-2.1", + "LGPL-2.1+", + "LGPL-3.0", + "LGPL-3.0+", + "Nunit", + "StandardML-NJ", + "wxWindows", +] diff --git a/bioimageio/spec/_internal/_settings.py b/bioimageio/spec/_internal/_settings.py new file mode 100644 index 000000000..25615eb5f --- /dev/null +++ b/bioimageio/spec/_internal/_settings.py @@ -0,0 +1,44 @@ +from typing import Optional + +from pydantic import Field +from pydantic_settings import BaseSettings, SettingsConfigDict +from typing_extensions import Annotated + + +class Settings(BaseSettings, extra="ignore"): + """environment variables for bioimageio.spec""" + + model_config = SettingsConfigDict( + env_prefix="BIOIMAGEIO_", env_file=".env", env_file_encoding="utf-8" + ) + + github_username: Optional[str] = None + """GitHub username for API requests""" + + github_token: Optional[str] = None + """GitHub token for API requests""" + + log_warnings: bool = True + """log validation warnings to console""" + + perform_io_checks: bool = True + """wether or not to perform validation that requires file io, + e.g. downloading a remote files. 
+
+    Existence of local absolute file paths is still being checked."""
+
+    CI: Annotated[bool, Field(alias="CI")] = False
+    """whether or not the execution happens in a continuous integration (CI) environment"""
+
+    user_agent: Optional[str] = None
+    """user agent for http requests"""
+
+    @property
+    def github_auth(self):
+        if self.github_username is None or self.github_token is None:
+            return None
+        else:
+            return (self.github_username, self.github_token)
+
+
+settings = Settings()
diff --git a/bioimageio/spec/_internal/common_nodes.py b/bioimageio/spec/_internal/common_nodes.py
new file mode 100644
index 000000000..052daae4e
--- /dev/null
+++ b/bioimageio/spec/_internal/common_nodes.py
@@ -0,0 +1,480 @@
+from __future__ import annotations
+
+import collections.abc
+import traceback
+from abc import ABC, abstractmethod
+from copy import deepcopy
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    ClassVar,
+    Dict,
+    Final,
+    FrozenSet,
+    Generic,
+    List,
+    Optional,
+    Protocol,
+    Tuple,
+    Type,
+    Union,
+    cast,
+    get_type_hints,
+)
+
+import pydantic
+from pydantic import (
+    DirectoryPath,
+    Field,
+    GetCoreSchemaHandler,
+    PrivateAttr,
+    StringConstraints,
+    TypeAdapter,
+    model_validator,
+)
+from pydantic_core import PydanticUndefined, core_schema
+from typing_extensions import (
+    Annotated,
+    LiteralString,
+    Self,
+    TypeVar,
+    TypeVarTuple,
+    Unpack,
+)
+
+from ..summary import (
+    WARNING_LEVEL_TO_NAME,
+    ErrorEntry,
+    ValidationDetail,
+    ValidationSummary,
+    WarningEntry,
+)
+from .field_warning import issue_warning
+from .io import BioimageioYamlContent
+from .node import Node as Node
+from .url import HttpUrl
+from .utils import assert_all_params_set_explicitly
+from .validation_context import (
+    ValidationContext,
+    validation_context_var,
+)
+from .warning_levels import ALERT, ERROR, INFO
+
+
+class StringNode(collections.UserString, ABC):
+    """deprecated!
don't use for new spec fields!""" + + _pattern: ClassVar[str] + _node_class: Type[Node] + _node: Optional[Node] = None + + def __init__(self: Self, seq: object) -> None: + super().__init__(seq) + type_hints = { + fn: t + for fn, t in get_type_hints(self.__class__).items() + if not fn.startswith("_") + } + defaults = {fn: getattr(self.__class__, fn, Field()) for fn in type_hints} + field_definitions: Dict[str, Any] = { + fn: (t, defaults[fn]) for fn, t in type_hints.items() + } + self._node_class = pydantic.create_model( + self.__class__.__name__, + __base__=Node, + __module__=self.__module__, + **field_definitions, + ) + + # freeze after initialization + def __setattr__(self: Self, __name: str, __value: Any): # type: ignore + raise AttributeError(f"{self} is immutable.") + + self.__setattr__ = __setattr__ # type: ignore + + @property + def model_fields(self): + return self._node_class.model_fields + + def __getattr__(self, name: str): + if name in self._node_class.model_fields: + if self._node is None: + raise AttributeError(f"{name} only available after validation") + + return getattr(self._node, name) + + raise AttributeError(name) + + @classmethod + def __get_pydantic_core_schema__( + cls, source: Type[Any], handler: GetCoreSchemaHandler + ) -> core_schema.CoreSchema: + assert issubclass(source, StringNode), source + return core_schema.no_info_after_validator_function( + cls._validate, + core_schema.str_schema(pattern=cls._pattern), + serialization=core_schema.plain_serializer_function_ser_schema( + cls._serialize, + info_arg=False, + return_schema=core_schema.str_schema(), + ), + ) + + @classmethod + def _get_data(cls, valid_string_data: str) -> Dict[str, Any]: + raise NotImplementedError(f"{cls.__name__}._get_data()") + + @classmethod + def _validate(cls, value: str) -> Self: + contrained_str_type = Annotated[str, StringConstraints(pattern=cls._pattern)] + contrained_str_adapter = TypeAdapter(cast(str, contrained_str_type)) + valid_string_data = contrained_str_adapter.validate_python(value) + data = cls._get_data(valid_string_data) + self = cls(valid_string_data) + object.__setattr__(self, "_node", self._node_class.model_validate(data)) + return self + + def _serialize(self) -> str: + # serialize inner node to call _package when needed + if self._node is not None: + _ = self._node.model_dump(mode="json") + + return self.data + + +SRC = TypeVar("SRC", bound=Union[Node, StringNode]) +TGT = TypeVar("TGT", bound=Node) + + +# converter without any additional args or kwargs: +# class Converter(Generic[SRC, TGT], ABC): +# # src: ClassVar[Type[SRC]] +# # tgt: ClassVar[Type[TGT]] +# # note: the above is not yet possible, see https://github.com/python/typing/discussions/1424 +# # we therefore use an instance +# def __init__(self, src: Type[SRC], tgt: Type[TGT], /): +# super().__init__() +# self.src: Final[Type[SRC]] = src +# self.tgt: Final[Type[TGT]] = tgt + +# @abstractmethod +# def _convert(self, src: SRC, tgt: "type[TGT | dict[str, Any]] ", /) -> "TGT | dict[str, Any]": +# ... 
+
+#     def convert(self, source: SRC, /) -> TGT:
+#         """convert `source` node
+
+#         Args:
+#             source: A bioimageio description node
+
+#         Raises:
+#             ValidationError: conversion failed
+#         """
+#         data = self.convert_as_dict(source)
+#         return assert_all_params_set_explicitly(self.tgt)(**data)
+
+#     def convert_as_dict(self, source: SRC) -> Dict[str, Any]:
+#         return cast(Dict[str, Any], self._convert(source, dict))
+
+
+# A TypeVar bound to a TypedDict seemed like a good way to add converter kwargs:
+# ```
+# class ConverterKwargs(TypedDict):
+#     pass
+# KW = TypeVar("KW", bound=ConverterKwargs, default=ConverterKwargs)
+# ```
+# sadly we cannot use a TypeVar bound to TypedDict and then unpack it in the Converter methods,
+# see https://github.com/python/typing/issues/1399
+# Therefore we use a TypeVarTuple and positional only args instead
+# (We are avoiding ParamSpec for its ambiguity 'args vs kwargs')
+CArgs = TypeVarTuple("CArgs")
+
+
+class Converter(Generic[SRC, TGT, Unpack[CArgs]], ABC):
+    # src: ClassVar[Type[SRC]]
+    # tgt: ClassVar[Type[TGT]]
+    # note: the above is not yet possible, see https://github.com/python/typing/discussions/1424
+    # we therefore use an instance
+    def __init__(self, src: Type[SRC], tgt: Type[TGT], /):
+        super().__init__()
+        self.src: Final[Type[SRC]] = src
+        self.tgt: Final[Type[TGT]] = tgt
+
+    @abstractmethod
+    def _convert(
+        self, src: SRC, tgt: "type[TGT | dict[str, Any]]", /, *args: Unpack[CArgs]
+    ) -> "TGT | dict[str, Any]": ...
+
+    # note: the following is not (yet) allowed, see https://github.com/python/typing/issues/1399
+    # we therefore use `kwargs` (and not `**kwargs`)
+    # def convert(self, source: SRC, /, **kwargs: Unpack[KW]) -> TGT:
+    def convert(self, source: SRC, /, *args: Unpack[CArgs]) -> TGT:
+        """convert `source` node
+
+        Args:
+            source: A bioimageio description node
+
+        Raises:
+            ValidationError: conversion failed
+        """
+        data = self.convert_as_dict(source, *args)
+        return assert_all_params_set_explicitly(self.tgt)(**data)
+
+    def convert_as_dict(self, source: SRC, /, *args: Unpack[CArgs]) -> Dict[str, Any]:
+        return cast(Dict[str, Any], self._convert(source, dict, *args))
+
+
+class NodeWithExplicitlySetFields(Node):
+    fields_to_set_explicitly: ClassVar[FrozenSet[LiteralString]] = frozenset()
+    """fields to set explicitly with their default value if they are not given,
+    such that they are always included even when dumping with 'exclude_unset'"""
+
+    @model_validator(mode="before")
+    @classmethod
+    def set_fields_explicitly(
+        cls, data: Union[Any, Dict[str, Any]]
+    ) -> Union[Any, Dict[str, Any]]:
+        if isinstance(data, dict):
+            for name in cls.fields_to_set_explicitly:
+                if name not in data:
+                    data[name] = cls.model_fields[name].get_default(
+                        call_default_factory=True
+                    )
+
+        return data
+
+
+if TYPE_CHECKING:
+
+    class _ResourceDescrBaseAbstractFieldsProtocol(Protocol):
+        """workaround to add abstract fields to ResourceDescrBase"""
+
+        # TODO: implement as proper abstract fields of ResourceDescrBase
+
+        type: Any  # should be LiteralString
+        format_version: Any  # should be LiteralString
+
+else:
+
+    class _ResourceDescrBaseAbstractFieldsProtocol:
+        pass
+
+
+class ResourceDescrBase(
+    NodeWithExplicitlySetFields, ABC, _ResourceDescrBaseAbstractFieldsProtocol
+):
+    """base class for all resource descriptions"""
+
+    _validation_summary: Optional[ValidationSummary] = None
+
+    fields_to_set_explicitly: ClassVar[FrozenSet[LiteralString]] = frozenset(
+        {"type", "format_version"}
+    )
+    implemented_format_version: ClassVar[str]
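# Editorial usage sketch (not part of the patch), in the same commented-out
# style this module already uses: a concrete `Converter` wires a source node
# class to a target node class. `OldPoint`, `NewPoint` and the field renaming
# are hypothetical; only `Converter` and `Node` come from this module.
#
#     class OldPoint(Node):
#         x: int
#         y: int
#
#     class NewPoint(Node):
#         pos_x: int
#         pos_y: int
#
#     class _PointConverter(Converter[OldPoint, NewPoint]):
#         def _convert(self, src, tgt, /):
#             # `tgt` is either `NewPoint` or `dict` (for `convert_as_dict`)
#             return tgt(pos_x=src.x, pos_y=src.y)
#
#     point_converter = _PointConverter(OldPoint, NewPoint)
#     new = point_converter.convert(OldPoint(x=1, y=2))  # NewPoint(pos_x=1, pos_y=2)
+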
implemented_format_version_tuple: ClassVar[Tuple[int, int, int]] + + # @field_validator("format_version", mode="before", check_fields=False) + # field_validator on "format_version" is not possible, because we want to use + # "format_version" in a descriminated Union higher up + # (PydanticUserError: Cannot use a mode='before' validator in the discriminator + # field 'format_version' of Model 'CollectionDescr') + @model_validator(mode="before") + @classmethod + def _ignore_future_patch(cls, data: Union[Dict[Any, Any], Any], /) -> Any: + if not isinstance(data, dict) or "format_version" not in data: + return data + + value = data["format_version"] + + def get_maj(v: str): + parts = v.split(".") + if parts and (p := parts[0]).isdecimal(): + return int(p) + else: + return 0 + + def get_min_patch(v: str): + parts = v.split(".") + if len(parts) == 3: + _, m, p = parts + if m.isdecimal() and p.isdecimal(): + return int(m), int(p) + + return (0, 0) + + if ( + cls.implemented_format_version != "unknown" + and value != cls.implemented_format_version + and isinstance(value, str) + and value.count(".") == 2 + and get_maj(value) == cls.implemented_format_version_tuple[0] + and get_min_patch(value) > cls.implemented_format_version_tuple[1:] + ): + issue_warning( + "future format_version '{value}' treated as '{implemented}'", + value=value, + msg_context=dict(implemented=cls.implemented_format_version), + severity=ALERT, + ) + data["format_version"] = cls.implemented_format_version + + return data + + @model_validator(mode="after") + def _set_init_validation_summary(self): + context = validation_context_var.get() + self._validation_summary = ValidationSummary( + name="bioimageio validation", + source_name=context.source_name, + status="passed", + details=[ + ValidationDetail( + name=f"initialized {self.type} {self.implemented_format_version}", + status="passed", + ) + ], + ) + return self + + @property + def validation_summary(self) -> ValidationSummary: + assert self._validation_summary is not None, "access only after initialization" + return self._validation_summary + + _root: Union[HttpUrl, DirectoryPath] = PrivateAttr( + default_factory=lambda: validation_context_var.get().root + ) + + @property + def root(self) -> Union[HttpUrl, DirectoryPath]: + return self._root + + @classmethod + def __pydantic_init_subclass__(cls, **kwargs: Any): + super().__pydantic_init_subclass__(**kwargs) + if ( + "format_version" in cls.model_fields + and cls.model_fields["format_version"].default is not PydanticUndefined + ): + cls.implemented_format_version = cls.model_fields["format_version"].default + if "." 
not in cls.implemented_format_version: + cls.implemented_format_version_tuple = (0, 0, 0) + else: + cls.implemented_format_version_tuple = cast( + Tuple[int, int, int], + tuple(int(x) for x in cls.implemented_format_version.split(".")), + ) + assert ( + len(cls.implemented_format_version_tuple) == 3 + ), cls.implemented_format_version_tuple + + @classmethod + def load( + cls, data: BioimageioYamlContent, context: Optional[ValidationContext] = None + ) -> Union[Self, InvalidDescr]: + context = context or validation_context_var.get() + assert isinstance(data, dict) + with context.replace(log_warnings=False): # don't log warnings to console + rd, errors, val_warnings = cls._load_impl(deepcopy(data)) + + if context.warning_level > INFO: + all_warnings_context = context.replace(warning_level=INFO) + # raise all validation warnings by reloading + with all_warnings_context: + _, _, val_warnings = cls._load_impl(deepcopy(data)) + + rd.validation_summary.add_detail( + ValidationDetail( + errors=errors, + name=( + "bioimageio.spec format validation" + f" {rd.type} {cls.implemented_format_version}" + ), + status="failed" if errors else "passed", + warnings=val_warnings, + ) + ) + + return rd + + @classmethod + def _load_impl( + cls, data: BioimageioYamlContent + ) -> Tuple[Union[Self, InvalidDescr], List[ErrorEntry], List[WarningEntry]]: + rd: Union[Self, InvalidDescr, None] = None + val_errors: List[ErrorEntry] = [] + val_warnings: List[WarningEntry] = [] + + try: + rd = cls.model_validate(data) + except pydantic.ValidationError as e: + for ee in e.errors(include_url=False): + if (severity := ee.get("ctx", {}).get("severity", ERROR)) < ERROR: + val_warnings.append( + WarningEntry( + loc=ee["loc"], + msg=ee["msg"], + type=ee["type"], + severity=severity, + ) + ) + else: + val_errors.append( + ErrorEntry(loc=ee["loc"], msg=ee["msg"], type=ee["type"]) + ) + + if len(val_errors) == 0: + val_errors.append( + ErrorEntry( + loc=(), + msg=( + f"Encountered {len(val_warnings)} more severe than warning" + " level " + f"'{WARNING_LEVEL_TO_NAME[validation_context_var.get().warning_level]}'" + ), + type="severe_warnings", + ) + ) + except Exception as e: + val_errors.append( + ErrorEntry( + loc=(), + msg=str(e), + type=type(e).__name__, + traceback=traceback.format_tb(e.__traceback__), + ) + ) + + if rd is None: + try: + rd = InvalidDescr.model_validate(data) + except Exception: + resource_type = cls.model_fields["type"].default + format_version = cls.implemented_format_version + rd = InvalidDescr(type=resource_type, format_version=format_version) + + return rd, val_errors, val_warnings + + +class InvalidDescr( + ResourceDescrBase, + extra="allow", + title="An invalid resource description", +): + type: Any = "unknown" + format_version: Any = "unknown" + fields_to_set_explicitly: ClassVar[FrozenSet[LiteralString]] = frozenset() + + +class KwargsNode(Node): + def get(self, item: str, default: Any = None) -> Any: + return self[item] if item in self else default + + def __getitem__(self, item: str) -> Any: + if item in self.model_fields: + return getattr(self, item) + else: + raise KeyError(item) + + def __contains__(self, item: str) -> int: + return item in self.model_fields diff --git a/bioimageio/spec/_internal/constants.py b/bioimageio/spec/_internal/constants.py new file mode 100644 index 000000000..7d660039c --- /dev/null +++ b/bioimageio/spec/_internal/constants.py @@ -0,0 +1,94 @@ +from __future__ import annotations + +import json +from types import MappingProxyType +from typing import Mapping, NamedTuple, 
Sequence, Set, Union
+
+from .utils import files
+
+with files("bioimageio.spec").joinpath("VERSION").open("r", encoding="utf-8") as f:
+    VERSION: str = json.load(f)["version"]
+    assert isinstance(VERSION, str), VERSION
+
+
+DOI_REGEX = (  # lax DOI regex validating the first 7 DOI characters only
+    r"^10\.[0-9]{4}.+$"
+)
+
+IN_PACKAGE_MESSAGE = "∈📦 "
+"""DEPRECATED, use ImportantFileSource to indicate that a file source should be included in a package"""
+
+# license file generated with scripts/update_spdx_licenses.py
+with files("bioimageio.spec").joinpath("static/spdx_licenses.json").open(
+    "r", encoding="utf-8"
+) as f:
+    _license_data = json.load(f)
+
+
+SHA256_HINT = """You can drag and drop your file to this
+[online tool](http://emn178.github.io/online-tools/sha256_checksum.html) to generate a SHA256 in your browser.
+Or you can generate a SHA256 checksum with Python's `hashlib`,
+[here is a codesnippet](https://gist.github.com/FynnBe/e64460463df89439cff218bbf59c1100)."""
+
+with files("bioimageio.spec").joinpath("static/tag_categories.json").open(
+    "r", encoding="utf-8"
+) as f:
+    TAG_CATEGORIES: Mapping[str, Mapping[str, Sequence[str]]] = json.load(f)
+
+# SI unit regex adapted from https://stackoverflow.com/a/3573731
+_prefix = "(Q|R|Y|Z|E|P|T|G|M|k|h|da|d|c|m|µ|n|p|f|a|z|y|r|q)"
+_unit = "(m|g|s|A|K|mol|cd|Hz|N|Pa|J|W|C|V|F|Ω|S|Wb|T|H|lm|lx|Bq|Gy|Sv|kat|l|L)"
+_any_power = r"(\^[+-]?[1-9]\d*)"
+_pos_power = r"(\^+?[1-9]\d*)"
+_unit_ap = f"{_prefix}?{_unit}{_any_power}?"
+_unit_pp = f"{_prefix}?{_unit}{_pos_power}?"
+SI_UNIT_REGEX = f"^{_unit_ap}((·{_unit_ap})|(/{_unit_pp}))*$"
+
+
+class MinMax(NamedTuple):
+    min: Union[int, float]
+    max: Union[int, float]
+
+
+# numpy.dtype limits; see scripts/generate_dtype_limits.py
+DTYPE_LIMITS = MappingProxyType(
+    {
+        "float32": MinMax(-3.4028235e38, 3.4028235e38),
+        "float64": MinMax(-1.7976931348623157e308, 1.7976931348623157e308),
+        "uint8": MinMax(0, 255),
+        "int8": MinMax(-128, 127),
+        "uint16": MinMax(0, 65535),
+        "int16": MinMax(-32768, 32767),
+        "uint32": MinMax(0, 4294967295),
+        "int32": MinMax(-2147483648, 2147483647),
+        "uint64": MinMax(0, 18446744073709551615),
+        "int64": MinMax(-9223372036854775808, 9223372036854775807),
+    }
+)
+
+# TODO: cache/store known gh users in file
+KNOWN_GH_USERS: Set[str] = {
+    "clementcaporal",
+    "donglaiw",
+    "jansanrom",
+    "pedgomgal1",
+    "aaitorg",
+    "bioimageiobot",
+    "carlosuc3m",
+    "cfusterbarcelo",
+    "constantinpape",
+    "ctr26",
+    "danifranco",
+    "esgomezm",
+    "fynnbe",
+    "githubuser2",
+    "iarganda",
+    "ivanhcenalmor",
+    "k-dominik",
+    "lenkaback",
+    "oeway",
+}
+N_KNOWN_GH_USERS = len(KNOWN_GH_USERS)
+KNOWN_INVALID_GH_USERS: Set[str] = {"arratemunoz", "lmescu"}
+N_KNOWN_INVALID_GH_USERS = len(KNOWN_INVALID_GH_USERS)
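# Editorial sketch (not part of the patch): what SI_UNIT_REGEX accepts and how
# DTYPE_LIMITS is typically consulted; standard library only, using the
# definitions from constants.py above.
import re

assert re.match(SI_UNIT_REGEX, "mm")              # prefix milli + unit metre
assert re.match(SI_UNIT_REGEX, "kg·m/s^2")        # product and power of units
assert re.match(SI_UNIT_REGEX, "pixel") is None   # not an SI unit

assert DTYPE_LIMITS["uint8"] == MinMax(0, 255)    # e.g. for tensor range checks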
"function"], name: str, github_file_url: str) -> str: +def get_ref_url( + type_: Literal["class", "function"], name: str, github_file_url: str +) -> str: """get github url with line range fragment to reference implementation from non-raw github file url example: >>> get_ref_url( - "class", - "Binarize", - "https://github.com/bioimage-io/core-bioimage-io-python/blob/main/bioimageio/core/prediction_pipeline/_processing.py" - ) - https://github.com/bioimage-io/core-bioimage-io-python/blob/main/bioimageio/core/prediction_pipeline/_processing.py#L107-L112 + ... "class", + ... "Binarize", + ... "https://github.com/bioimage-io/core-bioimage-io-python/blob/main/bioimageio/core/prediction_pipeline/_processing.py" + ... ) + 'https://github.com/bioimage-io/core-bioimage-io-python/blob/main/bioimageio/core/prediction_pipeline/_processing.py#L120-L127' """ # hotfix to handle requests not available in pyodide, see # https://github.com/bioimage-io/bioimage.io/issues/216#issuecomment-1012422194 try: import requests # not available in pyodide except Exception: - warnings.warn(f"Could not reslove {github_file_url} because requests library is not available.") + warnings.warn( + f"Could not reslove {github_file_url} because requests library is not" + + " available." + ) return "URL NOT RESOLVED" assert not urlparse(github_file_url).fragment, "unexpected url fragment" look_for = {"class": ast.ClassDef, "function": ast.FunctionDef}[type_] - raw_github_file_url = github_file_url.replace("github.com", "raw.githubusercontent.com").replace("/blob/", "/") + raw_github_file_url = github_file_url.replace( + "github.com", "raw.githubusercontent.com" + ).replace("/blob/", "/") try: code = requests.get(raw_github_file_url).text except requests.RequestException as e: - warnings.warn(f"Could not resolve {github_file_url} due to {e}. Please check your internet connection.") + warnings.warn( + f"Could not resolve {github_file_url} due to {e}. Please check your" + + " internet connection." 
+ ) return "URL NOT RESOLVED" tree = ast.parse(code) for d in tree.body: if isinstance(d, look_for): assert hasattr(d, "name") - if d.name == name: # type: ignore + if d.name == name: assert hasattr(d, "decorator_list") - start = d.decorator_list[0].lineno if d.decorator_list else d.lineno # type: ignore + start = d.decorator_list[0].lineno if d.decorator_list else d.lineno if sys.version_info >= (3, 8): stop = d.end_lineno else: @@ -58,10 +59,3 @@ def get_ref_url(type_: Literal["class", "function"], name: str, github_file_url: raise ValueError(f"{type_} {name} not found in {github_file_url}") return f"{github_file_url}#L{start}-L{stop}" - - -def resolve_bioimageio_descrcription(descr: Union[Callable[[], str], str]): - if callable(descr): - return descr() - else: - return descr diff --git a/bioimageio/spec/_internal/field_validation.py b/bioimageio/spec/_internal/field_validation.py new file mode 100644 index 000000000..dca6f8ece --- /dev/null +++ b/bioimageio/spec/_internal/field_validation.py @@ -0,0 +1,148 @@ +from __future__ import annotations + +import collections.abc +import dataclasses +import sys +from dataclasses import dataclass +from datetime import date, datetime +from typing import ( + Any, + Dict, + Hashable, + Mapping, + Sequence, + Type, + Union, +) + +import annotated_types +import requests +from pydantic import GetCoreSchemaHandler, functional_validators +from pydantic_core.core_schema import CoreSchema, no_info_after_validator_function + +from bioimageio.spec._internal._settings import settings +from bioimageio.spec._internal.constants import KNOWN_GH_USERS, KNOWN_INVALID_GH_USERS +from bioimageio.spec._internal.field_warning import issue_warning +from bioimageio.spec._internal.validation_context import validation_context_var + +if sys.version_info < (3, 10): + SLOTS: Dict[str, bool] = {} + KW_ONLY: Dict[str, bool] = {} +else: + SLOTS = {"slots": True} + KW_ONLY = {"kw_only": True} + + +@dataclasses.dataclass(frozen=True, **SLOTS) +class RestrictCharacters: + alphabet: str + + def __get_pydantic_core_schema__( + self, source: Type[Any], handler: GetCoreSchemaHandler + ) -> CoreSchema: + if not self.alphabet: + raise ValueError("Alphabet may not be empty") + schema = handler(source) # get the CoreSchema from the type / inner constraints + if schema["type"] != "str": + raise TypeError("RestrictCharacters can only be applied to strings") + return no_info_after_validator_function( + self.validate, + schema, + ) + + def validate(self, value: str) -> str: + if any(c not in self.alphabet for c in value): + raise ValueError(f"{value!r} is not restricted to {self.alphabet!r}") + return value + + +def is_valid_yaml_leaf_value(value: Any) -> bool: + return value is None or isinstance(value, (bool, date, datetime, int, float, str)) + + +def is_valid_yaml_key(value: Union[Any, Sequence[Any]]) -> bool: + return ( + is_valid_yaml_leaf_value(value) + or isinstance(value, tuple) + and all(is_valid_yaml_leaf_value(v) for v in value) + ) + + +def is_valid_yaml_mapping(value: Union[Any, Mapping[Any, Any]]) -> bool: + return isinstance(value, collections.abc.Mapping) and all( + is_valid_yaml_key(k) and is_valid_yaml_value(v) for k, v in value.items() + ) + + +def is_valid_yaml_sequence(value: Union[Any, Sequence[Any]]) -> bool: + return isinstance(value, collections.abc.Sequence) and all( + is_valid_yaml_value(v) for v in value + ) + + +def is_valid_yaml_value(value: Any) -> bool: + return any( + is_valid(value) + for is_valid in ( + is_valid_yaml_key, + is_valid_yaml_mapping, + 
is_valid_yaml_sequence, + ) + ) + + +def validate_unique_entries(seq: Sequence[Hashable]): + if len(seq) != len(set(seq)): + raise ValueError("Entries are not unique.") + return seq + + +# TODO: make sure we use this one everywhere and not the vanilla pydantic one +@dataclass(frozen=True, **SLOTS) +class AfterValidator(functional_validators.AfterValidator): + def __str__(self): + return f"AfterValidator({self.func.__name__})" + + +# TODO: make sure we use this one everywhere and not the vanilla pydantic one +@dataclass(frozen=True, **SLOTS) +class BeforeValidator(functional_validators.BeforeValidator): + def __str__(self): + return f"BeforeValidator({self.func.__name__})" + + +# TODO: make sure we use this one everywhere and not the vanilla pydantic one +@dataclass(frozen=True, **SLOTS) +class Predicate(annotated_types.Predicate): + def __str__(self): + return f"Predicate({self.func.__name__})" + + +def validate_gh_user(username: str, hotfix_known_errorenous_names: bool = True) -> str: + if hotfix_known_errorenous_names: + if username == "Constantin Pape": + return "constantinpape" + + if ( + username.lower() in KNOWN_GH_USERS + or not validation_context_var.get().perform_io_checks + ): + return username + + if username.lower() in KNOWN_INVALID_GH_USERS: + raise ValueError(f"Known invalid GitHub user '{username}'") + + r = requests.get( + f"https://api.github.com/users/{username}", auth=settings.github_auth + ) + if r.status_code == 403 and r.reason == "rate limit exceeded": + issue_warning( + "Could not verify GitHub user '{value}' due to GitHub API rate limit", + value=username, + ) + elif r.status_code != 200: + KNOWN_INVALID_GH_USERS.add(username.lower()) + raise ValueError(f"Could not find GitHub user '{username}'") + + KNOWN_GH_USERS.add(username.lower()) + return username diff --git a/bioimageio/spec/_internal/field_warning.py b/bioimageio/spec/_internal/field_warning.py new file mode 100644 index 000000000..31c013c5d --- /dev/null +++ b/bioimageio/spec/_internal/field_warning.py @@ -0,0 +1,147 @@ +import dataclasses +import sys +from typing import TYPE_CHECKING, Any, Dict, Literal, Optional, Union, get_args + +import pydantic.functional_validators +from annotated_types import BaseMetadata, GroupedMetadata +from loguru import logger +from pydantic import TypeAdapter +from pydantic._internal._decorators import inspect_validator +from pydantic_core import PydanticCustomError +from pydantic_core.core_schema import ( + NoInfoValidatorFunction, + ValidationInfo, + WithInfoValidatorFunction, +) +from typing_extensions import Annotated, LiteralString + +from .._internal.validation_context import validation_context_var +from .._internal.warning_levels import WARNING, WarningSeverity + +if TYPE_CHECKING: + from pydantic.functional_validators import _V2Validator # type: ignore + +if sys.version_info < (3, 10): + SLOTS: Dict[str, Any] = {} +else: + SLOTS = {"slots": True} + + +ValidatorFunction = Union[NoInfoValidatorFunction, WithInfoValidatorFunction] + +AnnotationMetaData = Union[BaseMetadata, GroupedMetadata] + + +def warn( + typ: Union[AnnotationMetaData, Any], + msg: LiteralString, # warning message, e.g. 
"'{value}' incompatible with {typ} + severity: WarningSeverity = WARNING, +): + """treat a type or its annotation metadata as a warning condition""" + if isinstance(typ, get_args(AnnotationMetaData)): + typ = Annotated[Any, typ] + + validator = TypeAdapter(typ) + + return AfterWarner( + validator.validate_python, severity=severity, msg=msg, context={"typ": typ} + ) + + +def call_validator_func( + func: "_V2Validator", + mode: Literal["after", "before", "plain", "wrap"], + value: Any, + info: ValidationInfo, +) -> Any: + info_arg = inspect_validator(func, mode) + if info_arg: + return func(value, info) # type: ignore + else: + return func(value) # type: ignore + + +def as_warning( + func: "_V2Validator", + *, + mode: Literal["after", "before", "plain", "wrap"] = "after", + severity: WarningSeverity = WARNING, + msg: Optional[LiteralString] = None, + msg_context: Optional[Dict[str, Any]] = None, +) -> ValidatorFunction: + """turn validation function into a no-op, based on warning level""" + + def wrapper(value: Any, info: ValidationInfo) -> Any: + try: + call_validator_func(func, mode, value, info) + except (AssertionError, ValueError) as e: + issue_warning( + msg or ",".join(e.args), + value=value, + severity=severity, + msg_context=msg_context, + ) + + return value + + return wrapper + + +@dataclasses.dataclass(frozen=True, **SLOTS) +class AfterWarner(pydantic.functional_validators.AfterValidator): + """Like AfterValidator, but wraps validation `func` `as_warning`""" + + severity: WarningSeverity = WARNING + msg: Optional[LiteralString] = None + context: Optional[Dict[str, Any]] = None + + def __post_init__(self): + object.__setattr__( + self, + "func", + as_warning( + self.func, + mode="after", + severity=self.severity, + msg=self.msg, + msg_context=self.context, + ), + ) + + +@dataclasses.dataclass(frozen=True, **SLOTS) +class BeforeWarner(pydantic.functional_validators.BeforeValidator): + """Like BeforeValidator, but wraps validation `func` `as_warning`""" + + severity: WarningSeverity = WARNING + msg: Optional[LiteralString] = None + context: Optional[Dict[str, Any]] = None + + def __post_init__(self): + object.__setattr__( + self, + "func", + as_warning( + self.func, + mode="before", + severity=self.severity, + msg=self.msg, + msg_context=self.context, + ), + ) + + +# TODO: add `loc: Loc` to `issue_warning()` +# and use a loguru handler to format warnings accordingly +def issue_warning( + msg: LiteralString, + *, + value: Any, + severity: WarningSeverity = WARNING, + msg_context: Optional[Dict[str, Any]] = None, +): + msg_context = {"value": value, "severity": severity, **(msg_context or {})} + if severity >= validation_context_var.get().warning_level: + raise PydanticCustomError("warning", msg, msg_context) + elif validation_context_var.get().log_warnings: + logger.log(severity, msg.format(**msg_context)) diff --git a/bioimageio/spec/_internal/io.py b/bioimageio/spec/_internal/io.py new file mode 100644 index 000000000..bbcf559e7 --- /dev/null +++ b/bioimageio/spec/_internal/io.py @@ -0,0 +1,604 @@ +from __future__ import annotations + +import hashlib +import sys +import warnings +from abc import abstractmethod +from dataclasses import dataclass +from datetime import date as _date +from datetime import datetime as _datetime +from pathlib import Path, PurePath +from typing import ( + Any, + Dict, + Generic, + List, + Optional, + Sequence, + Tuple, + Type, + TypedDict, + TypeVar, + Union, +) +from urllib.parse import urlparse, urlsplit, urlunsplit + +import pooch +import pydantic +from 
pydantic import ( + AfterValidator, + AnyUrl, + DirectoryPath, + FilePath, + GetCoreSchemaHandler, + PlainSerializer, + PrivateAttr, + RootModel, + SerializationInfo, + StringConstraints, + TypeAdapter, + model_validator, +) +from pydantic_core import core_schema +from typing_extensions import ( + Annotated, + LiteralString, + NotRequired, + Self, + Unpack, + assert_never, +) +from typing_extensions import TypeAliasType as _TypeAliasType + +from .._internal._settings import settings +from .._internal.io_basics import ( + ALL_BIOIMAGEIO_YAML_NAMES, + BIOIMAGEIO_YAML, + AbsoluteDirectory, + AbsoluteFilePath, + FileName, +) +from .._internal.node import Node +from .._internal.packaging_context import packaging_context_var +from .._internal.root_url import RootHttpUrl +from .._internal.url import HttpUrl +from .._internal.validated_string import ValidatedString +from .._internal.validation_context import ( + validation_context_var, +) + +if sys.version_info < (3, 10): + SLOTS: Dict[str, bool] = {} +else: + SLOTS = {"slots": True} + + +Sha256 = ValidatedString[ + Annotated[ + str, + StringConstraints( + strip_whitespace=True, to_lower=True, min_length=64, max_length=64 + ), + ] +] + +AbsolutePathT = TypeVar( + "AbsolutePathT", bound=Union[HttpUrl, AbsoluteDirectory, AbsoluteFilePath] +) + + +class RelativePathBase(RootModel[PurePath], Generic[AbsolutePathT], frozen=True): + _absolute: AbsolutePathT = PrivateAttr() + + @property + def path(self) -> PurePath: + return self.root + + @property + def absolute(self) -> AbsolutePathT: + """the absolute path/url (resolved at time of initialization with the root of the ValidationContext)""" + return self._absolute + + def model_post_init(self, __context: Any) -> None: + if self.root.is_absolute(): + raise ValueError(f"{self.root} is an absolute path.") + + self._absolute = ( # pyright: ignore[reportAttributeAccessIssue] + self.get_absolute(validation_context_var.get().root) + ) + super().model_post_init(__context) + + # @property + # def __members(self): + # return (self.path,) + + # def __eq__(self, __value: object) -> bool: + # return type(__value) is type(self) and self.__members == __value.__members + + # def __hash__(self) -> int: + # return hash(self.__members) + + def __str__(self) -> str: + return self.root.as_posix() + + def __repr__(self) -> str: + return f"RelativePath('{self}')" + + @abstractmethod + def get_absolute( + self, root: Union[RootHttpUrl, AbsoluteDirectory, pydantic.AnyUrl] + ) -> AbsolutePathT: ... 
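# Editorial sketch (not part of the patch): relative paths resolve against the
# current validation context root at initialization time; with an http root the
# relative path is appended to the URL (with a special case for zenodo record
# URLs, see `_get_absolute_impl` below). `RelativeFilePath` is defined just
# below; `/data/my_model` is a hypothetical root, and it is assumed here that
# `ValidationContext` accepts `root` and `perform_io_checks` as keyword
# arguments and acts as a context manager:
#
#     from pathlib import Path, PurePath
#
#     with ValidationContext(root=Path("/data/my_model"), perform_io_checks=False):
#         rel = RelativeFilePath(PurePath("weights/model.pt"))
#
#     assert rel.absolute == Path("/data/my_model/weights/model.pt")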
+ + def _get_absolute_impl( + self, root: Union[RootHttpUrl, AbsoluteDirectory, pydantic.AnyUrl] + ) -> Union[Path, HttpUrl]: + if isinstance(root, Path): + return (root / self.root).absolute() + + parsed = urlsplit(str(root)) + path = list(parsed.path.strip("/").split("/")) + rel_path = self.root.as_posix().strip("/") + if ( + parsed.netloc == "zenodo.org" + and parsed.path.startswith("/api/records/") + and parsed.path.endswith("/content") + ): + path.insert(-1, rel_path) + else: + path.append(rel_path) + + return HttpUrl( + urlunsplit( + ( + parsed.scheme, + parsed.netloc, + "/".join(path), + parsed.query, + parsed.fragment, + ) + ) + ) + + @classmethod + def _validate(cls, value: Union[PurePath, str]): + if isinstance(value, str) and ( + value.startswith("https://") or value.startswith("http://") + ): + raise ValueError(f"{value} looks like a URL, not a relative path") + + return cls(PurePath(value)) + + +class RelativePath( + RelativePathBase[Union[AbsoluteFilePath, AbsoluteDirectory, HttpUrl]], frozen=True +): + def get_absolute( + self, root: "RootHttpUrl | Path | AnyUrl" + ) -> "AbsoluteFilePath | AbsoluteDirectory | HttpUrl": + absolute = self._get_absolute_impl(root) + if ( + isinstance(absolute, Path) + and validation_context_var.get().perform_io_checks + and not absolute.exists() + ): + raise ValueError(f"{absolute} does not exist") + + return absolute + + +class RelativeFilePath(RelativePathBase[Union[AbsoluteFilePath, HttpUrl]], frozen=True): + def get_absolute( + self, root: "RootHttpUrl | Path | AnyUrl" + ) -> "AbsoluteFilePath | HttpUrl": + absolute = self._get_absolute_impl(root) + if ( + isinstance(absolute, Path) + and validation_context_var.get().perform_io_checks + and not absolute.is_file() + ): + raise ValueError(f"{absolute} does not point to an existing file") + + return absolute + + +class RelativeDirectory( + RelativePathBase[Union[AbsoluteDirectory, HttpUrl]], frozen=True +): + def get_absolute( + self, root: "RootHttpUrl | Path | AnyUrl" + ) -> "AbsoluteDirectory | HttpUrl": + absolute = self._get_absolute_impl(root) + if ( + isinstance(absolute, Path) + and validation_context_var.get().perform_io_checks + and not absolute.is_dir() + ): + raise ValueError(f"{absolute} does not point to an existing directory") + + return absolute + + +FileSource = Union[FilePath, RelativeFilePath, HttpUrl, pydantic.HttpUrl] +PermissiveFileSource = Union[FileSource, str] + +V_suffix = TypeVar("V_suffix", bound=FileSource) +path_or_url_adapter = TypeAdapter(Union[FilePath, DirectoryPath, HttpUrl]) + + +def validate_suffix( + value: V_suffix, suffix: Union[str, Sequence[str]], case_sensitive: bool +) -> V_suffix: + """check final suffix""" + if isinstance(suffix, str): + suffixes = [suffix] + else: + suffixes = suffix + + assert len(suffixes) > 0, "no suffix given" + assert all( + suff.startswith(".") for suff in suffixes + ), "expected suffixes to start with '.'" + o_value = value + strict = interprete_file_source(value) + + if isinstance(strict, (HttpUrl, AnyUrl)): + if strict.path is None or "." not in (path := strict.path): + actual_suffix = "" + elif ( + strict.host == "zenodo.org" + and path.startswith("/api/records/") + and path.endswith("/content") + ): + actual_suffix = "." + path[: -len("/content")].split(".")[-1] + else: + actual_suffix = "." 
+ path.split(".")[-1] + + elif isinstance(strict, PurePath): + actual_suffix = strict.suffixes[-1] + elif isinstance(strict, RelativeFilePath): + actual_suffix = strict.path.suffixes[-1] + else: + assert_never(strict) + + if ( + case_sensitive + and actual_suffix not in suffixes + or not case_sensitive + and actual_suffix.lower() not in [s.lower() for s in suffixes] + ): + if len(suffixes) == 1: + raise ValueError(f"Expected suffix {suffixes[0]}, but got {actual_suffix}") + else: + raise ValueError( + f"Expected a suffix from {suffixes}, but got {actual_suffix}" + ) + + return o_value + + +@dataclass(frozen=True, **SLOTS) +class WithSuffix: + suffix: Union[LiteralString, Tuple[LiteralString, ...]] + case_sensitive: bool + + def __get_pydantic_core_schema__( + self, source: Type[Any], handler: GetCoreSchemaHandler + ): + if not self.suffix: + raise ValueError("suffix may not be empty") + + schema = handler(source) + return core_schema.no_info_after_validator_function( + self.validate, + schema, + ) + + def validate(self, value: FileSource) -> FileSource: + return validate_suffix(value, self.suffix, case_sensitive=self.case_sensitive) + + +def wo_special_file_name(src: FileSource) -> FileSource: + if has_valid_rdf_name(src): + raise ValueError( + f"'{src}' not allowed here as its filename is reserved to identify" + + f" '{BIOIMAGEIO_YAML}' (or equivalent) files." + ) + + return src + + +def _package(value: FileSource, info: SerializationInfo) -> Union[str, Path, FileName]: + if (packaging_context := packaging_context_var.get()) is None: + # convert to standard python obj + # note: pydantic keeps returning Rootmodels (here `HttpUrl`) as-is, but if + # this function returns one RootModel, paths are "further serialized" by + # returning the 'root' attribute, which is incorrect. + # see https://github.com/pydantic/pydantic/issues/8963 + # TODO: follow up on https://github.com/pydantic/pydantic/issues/8963 + if isinstance(value, Path): + unpackaged = value + elif isinstance(value, HttpUrl): + unpackaged = value.root + elif isinstance(value, RelativeFilePath): + unpackaged = Path(value.path) + elif isinstance(value, AnyUrl): + unpackaged = str(value) + else: + assert_never(value) + + if info.mode_is_json(): + # convert to json value # TODO: remove and let pydantic do this? 
+ if isinstance(unpackaged, Path): + unpackaged = str(unpackaged) + elif isinstance(unpackaged, str): + pass + else: + assert_never(unpackaged) + else: + warnings.warn( + "dumping with mode='python' is currently not fully supported for " + + "fields that are included when packaging; returned objects are " + + "standard python objects" + ) + + return unpackaged # return unpackaged file source + + # package the file source: + # add it to the current package's file sources and return its collision free file name + if isinstance(value, RelativeFilePath): + src = value.absolute + elif isinstance(value, pydantic.AnyUrl): + src = HttpUrl(str(value)) + elif isinstance(value, HttpUrl): + src = value + elif isinstance(value, Path): + src = value.resolve() + else: + assert_never(value) + + fname = extract_file_name(src) + if fname == packaging_context.bioimageio_yaml_file_name: + raise ValueError( + f"Reserved file name '{packaging_context.bioimageio_yaml_file_name}' " + + "not allowed for a file to be packaged" + ) + + fsrcs = packaging_context.file_sources + assert not any( + fname.endswith(special) for special in ALL_BIOIMAGEIO_YAML_NAMES + ), fname + if fname in fsrcs and fsrcs[fname] != src: + for i in range(2, 20): + fn, *ext = fname.split(".") + alternative_file_name = ".".join([f"{fn}_{i}", *ext]) + if ( + alternative_file_name not in fsrcs + or fsrcs[alternative_file_name] == src + ): + fname = alternative_file_name + break + else: + raise ValueError(f"Too many file name clashes for {fname}") + + fsrcs[fname] = src + return fname + + +include_in_package_serializer = PlainSerializer(_package, when_used="unless-none") +ImportantFileSource = Annotated[ + FileSource, + AfterValidator(wo_special_file_name), + include_in_package_serializer, +] + + +def has_valid_rdf_name(src: FileSource) -> bool: + return is_valid_rdf_name(extract_file_name(src)) + + +def is_valid_rdf_name(file_name: FileName) -> bool: + for special in ALL_BIOIMAGEIO_YAML_NAMES: + if file_name.endswith(special): + return True + + return False + + +def ensure_has_valid_rdf_name(src: FileSource) -> FileSource: + if not has_valid_rdf_name(src): + raise ValueError( + f"'{src}' does not have a valid filename to identify" + + f" '{BIOIMAGEIO_YAML}' (or equivalent) files." + ) + + return src + + +def ensure_is_valid_rdf_name(file_name: FileName) -> FileName: + if not is_valid_rdf_name(file_name): + raise ValueError( + f"'{file_name}' is not a valid filename to identify" + + f" '{BIOIMAGEIO_YAML}' (or equivalent) files." + ) + + return file_name + + +# types as loaded from YAML 1.2 (with ruyaml) +YamlLeafValue = Union[ + bool, _date, _datetime, int, float, str, None +] # note: order relevant for deserializing +YamlKey = Union[ # YAML Arrays are cast to tuples if used as key in mappings + YamlLeafValue, Tuple[YamlLeafValue, ...] 
# (nesting is not allowed though) +] +YamlValue = _TypeAliasType( + "YamlValue", + Union[YamlLeafValue, List["YamlValue"], Dict[YamlKey, "YamlValue"]], +) +BioimageioYamlContent = Dict[str, YamlValue] +BioimageioYamlSource = Union[PermissiveFileSource, BioimageioYamlContent] + + +@dataclass +class OpenedBioimageioYaml: + content: BioimageioYamlContent + original_root: Union[AbsoluteDirectory, RootHttpUrl] + original_file_name: str + + +@dataclass +class DownloadedFile: + path: FilePath + original_root: Union[AbsoluteDirectory, RootHttpUrl] + original_file_name: str + + +class HashKwargs(TypedDict): + sha256: NotRequired[Optional[Sha256]] + + +StrictFileSource = Union[HttpUrl, FilePath, RelativeFilePath] +_strict_file_source_adapter = TypeAdapter(StrictFileSource) + + +def interprete_file_source(file_source: PermissiveFileSource) -> StrictFileSource: + if isinstance(file_source, pydantic.AnyUrl): + file_source = str(file_source) + + if isinstance(file_source, str): + return _strict_file_source_adapter.validate_python(file_source) + else: + return file_source + + +def _get_known_hash(hash_kwargs: HashKwargs): + if "sha256" in hash_kwargs and hash_kwargs["sha256"] is not None: + return f"sha256:{hash_kwargs['sha256']}" + else: + return None + + +def _get_unique_file_name(url: Union[HttpUrl, pydantic.HttpUrl]): + """ + Create a unique file name based on the given URL; + adapted from pooch.utils.unique_file_name + """ + md5 = hashlib.md5(str(url).encode()).hexdigest() + fname = extract_file_name(url) + # Crop the start of the file name to fit 255 characters including the hash + # and the : + fname = fname[-(255 - len(md5) - 1) :] + unique_name = f"{md5}-{fname}" + return unique_name + + +def download( + source: Union[PermissiveFileSource, FileDescr], + /, + **kwargs: Unpack[HashKwargs], +) -> DownloadedFile: + if isinstance(source, FileDescr): + return source.download() + + strict_source = interprete_file_source(source) + if isinstance(strict_source, RelativeFilePath): + strict_source = strict_source.absolute + + if isinstance(strict_source, PurePath): + local_source = strict_source + root: Union[RootHttpUrl, DirectoryPath] = strict_source.parent + else: + if strict_source.scheme not in ("http", "https"): + raise NotImplementedError(strict_source.scheme) + + if settings.CI: + headers = {"User-Agent": "ci"} + progressbar = False + else: + headers = {} + progressbar = True + + if settings.user_agent is not None: + headers["User-Agent"] = settings.user_agent + + downloader = pooch.HTTPDownloader(headers=headers, progressbar=progressbar) + fname = _get_unique_file_name(strict_source) + _ls: Any = pooch.retrieve( + url=str(strict_source), + known_hash=_get_known_hash(kwargs), + downloader=downloader, + fname=fname, + ) + local_source = Path(_ls).absolute() + root = strict_source.parent + + return DownloadedFile( + local_source, + root, + extract_file_name(strict_source), + ) + + +class FileDescr(Node): + source: ImportantFileSource + """โˆˆ๐Ÿ“ฆ file source""" + + sha256: Optional[Sha256] = None + """SHA256 checksum of the source file""" + + @model_validator(mode="after") + def validate_sha256(self) -> Self: + context = validation_context_var.get() + if not context.perform_io_checks: + return self + + local_source = download(self.source, sha256=self.sha256).path + actual_sha = get_sha256(local_source) + if self.sha256 is None: + self.sha256 = actual_sha + elif self.sha256 != actual_sha: + raise ValueError( + f"Sha256 mismatch for {self.source}. Expected {self.sha256}, got " + + f"{actual_sha}. 
+class FileDescr(Node):
+    source: ImportantFileSource
+    """∈📦 file source"""
+
+    sha256: Optional[Sha256] = None
+    """SHA256 checksum of the source file"""
+
+    @model_validator(mode="after")
+    def validate_sha256(self) -> Self:
+        context = validation_context_var.get()
+        if not context.perform_io_checks:
+            return self
+
+        local_source = download(self.source, sha256=self.sha256).path
+        actual_sha = get_sha256(local_source)
+        if self.sha256 is None:
+            self.sha256 = actual_sha
+        elif self.sha256 != actual_sha:
+            raise ValueError(
+                f"Sha256 mismatch for {self.source}. Expected {self.sha256}, got "
+                + f"{actual_sha}. Update expected `sha256` or point to the matching "
+                + "file."
+            )
+
+        return self
+
+    def download(self):
+        return download(self.source, sha256=self.sha256)
+
+
+def extract_file_name(
+    src: Union[pydantic.HttpUrl, HttpUrl, PurePath, RelativeFilePath],
+) -> str:
+    if isinstance(src, RelativeFilePath):
+        return src.path.name
+    elif isinstance(src, PurePath):
+        return src.name
+    else:
+        url = urlparse(str(src))
+        if (
+            url.scheme == "https"
+            and url.hostname == "zenodo.org"
+            and url.path.startswith("/api/records/")
+            and url.path.endswith("/content")
+        ):
+            return url.path.split("/")[-2]
+        else:
+            return url.path.split("/")[-1]
+
+
+def get_sha256(path: Path) -> Sha256:
+    """from https://stackoverflow.com/a/44873382"""
+    h = hashlib.sha256()
+    b = bytearray(128 * 1024)
+    mv = memoryview(b)
+    with open(path, "rb", buffering=0) as f:
+        for n in iter(lambda: f.readinto(mv), 0):
+            h.update(mv[:n])
+
+    sha = h.hexdigest()
+    assert len(sha) == 64
+    return Sha256(sha)
diff --git a/bioimageio/spec/_internal/io_basics.py b/bioimageio/spec/_internal/io_basics.py
new file mode 100644
index 000000000..24773d144
--- /dev/null
+++ b/bioimageio/spec/_internal/io_basics.py
@@ -0,0 +1,13 @@
+from pathlib import Path
+
+from annotated_types import Predicate
+from pydantic import DirectoryPath, FilePath
+from typing_extensions import Annotated
+
+FileName = str
+AbsoluteDirectory = Annotated[DirectoryPath, Predicate(Path.is_absolute)]
+AbsoluteFilePath = Annotated[FilePath, Predicate(Path.is_absolute)]
+
+BIOIMAGEIO_YAML = "rdf.yaml"
+ALTERNATIVE_BIOIMAGEIO_YAML_NAMES = ("bioimageio.yaml", "model.yaml")
+ALL_BIOIMAGEIO_YAML_NAMES = (BIOIMAGEIO_YAML,) + ALTERNATIVE_BIOIMAGEIO_YAML_NAMES
diff --git a/bioimageio/spec/_internal/io_utils.py b/bioimageio/spec/_internal/io_utils.py
new file mode 100644
index 000000000..18d7cd9da
--- /dev/null
+++ b/bioimageio/spec/_internal/io_utils.py
@@ -0,0 +1,210 @@
+import io
+import platform
+import warnings
+from contextlib import nullcontext
+from pathlib import Path
+from typing import Any, Dict, Iterable, Mapping, Optional, TextIO, Union, cast
+from zipfile import ZipFile, is_zipfile
+
+import numpy
+from numpy.typing import NDArray
+from pydantic import DirectoryPath, FilePath, NewPath
+from ruyaml import YAML
+from typing_extensions import Unpack
+
+from .io import (
+    BIOIMAGEIO_YAML,
+    BioimageioYamlContent,
+    FileDescr,
+    HashKwargs,
+    OpenedBioimageioYaml,
+    YamlValue,
+    download,
+)
+from .io_basics import ALTERNATIVE_BIOIMAGEIO_YAML_NAMES, FileName
+from .types import FileSource, PermissiveFileSource
+
+if platform.machine() == "wasm32":
+    import pyodide_http  # type: ignore
+
+    pyodide_http.patch_all()
+
+
+yaml = YAML(typ="safe")
+
+
+def read_yaml(file: Union[FilePath, TextIO]) -> YamlValue:
+    if isinstance(file, Path):
+        cm = file.open("r", encoding="utf-8")
+    else:
+        cm = nullcontext(file)
+
+    with cm as f:
+        content: YamlValue = yaml.load(f)
+
+    return content
+
+
+def write_yaml(content: YamlValue, /, file: Union[NewPath, FilePath, TextIO]):
+    if isinstance(file, Path):
+        cm = file.open("w", encoding="utf-8")
+    else:
+        cm = nullcontext(file)
+
+    with cm as f:
+        yaml.dump(content, f)
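Editor's note — a minimal round-trip sketch (not part of the patch; the file path is a hypothetical placeholder):

    tmp = Path("example.yaml")
    write_yaml({"format_version": "0.5.0", "name": "demo"}, tmp)
    assert read_yaml(tmp) == {"format_version": "0.5.0", "name": "demo"}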
+def _sanitize_bioimageio_yaml(content: YamlValue) -> BioimageioYamlContent:
+    if not isinstance(content, dict):
+        raise ValueError(
+            f"Expected {BIOIMAGEIO_YAML} content to be a mapping (got {type(content)})."
+        )
+
+    for key in content:
+        if not isinstance(key, str):
+            raise ValueError(
+                f"Expected all keys (field names) in a {BIOIMAGEIO_YAML} "
+                + f"to be strings (got '{key}' of type {type(key)})."
+            )
+
+    return cast(BioimageioYamlContent, content)
+
+
+def open_bioimageio_yaml(
+    source: PermissiveFileSource, /, **kwargs: Unpack[HashKwargs]
+) -> OpenedBioimageioYaml:
+    downloaded = download(source, **kwargs)
+    local_source = downloaded.path
+    root = downloaded.original_root
+
+    if is_zipfile(local_source):
+        local_source = unzip(local_source)
+
+    if local_source.is_dir():
+        root = local_source
+        local_source = local_source / find_description_file_name(local_source)
+
+    content = _sanitize_bioimageio_yaml(read_yaml(local_source))
+
+    return OpenedBioimageioYaml(content, root, downloaded.original_file_name)
+
+
+def identify_bioimageio_yaml_file(file_names: Iterable[FileName]) -> FileName:
+    file_names = sorted(file_names)
+    for bioimageio_name in (BIOIMAGEIO_YAML,) + ALTERNATIVE_BIOIMAGEIO_YAML_NAMES:
+        for fname in file_names:
+            if fname == bioimageio_name or fname.endswith(f".{bioimageio_name}"):
+                return fname
+
+    raise ValueError(
+        f"No {BIOIMAGEIO_YAML} found in {file_names}. (Looking for '{BIOIMAGEIO_YAML}'"
+        + " or any of the alternative file names:"
+        + f" {ALTERNATIVE_BIOIMAGEIO_YAML_NAMES}, or any file with an extension of"
+        + f" those, e.g. 'anything.{BIOIMAGEIO_YAML}')."
+    )
+
+
+def find_description_file_name(path: Path) -> FileName:
+    if path.is_file():
+        if not is_zipfile(path):
+            return path.name
+
+        with ZipFile(path, "r") as f:
+            file_names = f.namelist()
+    else:
+        file_names = [p.name for p in path.glob("*")]
+
+    return identify_bioimageio_yaml_file(file_names)
+
+
+def unzip(
+    zip_file: Union[FilePath, ZipFile],
+    out_path: Optional[DirectoryPath] = None,
+    overwrite: bool = False,
+) -> DirectoryPath:
+    if isinstance(zip_file, ZipFile):
+        zip_context = nullcontext(zip_file)
+        if out_path is None:
+            raise ValueError("Missing argument: out_path")
+    else:
+        zip_context = ZipFile(zip_file, "r")
+        if out_path is None:
+            out_path = zip_file.with_suffix(zip_file.suffix + ".unzip")
+
+    with zip_context as f:
+        if out_path.exists():
+            if overwrite:
+                warnings.warn(f"Overwriting existing unzipped archive at {out_path}")
+            else:
+                found_content = {
+                    p.relative_to(out_path).as_posix() for p in out_path.glob("*")
+                }
+                expected_content = {info.filename for info in f.filelist}
+                if expected_content - found_content:
+                    warnings.warn(
+                        f"Unzipped archive at {out_path} is missing expected files."
+                    )
+                    parts = out_path.name.split("_")
+                    nr, *suffixes = parts[-1].split(".")
+                    if nr.isdecimal():
+                        nr = str(int(nr) + 1)
+                    else:
+                        nr = f"1.{nr}"
+
+                    parts[-1] = ".".join([nr, *suffixes])
+                    return unzip(
+                        f, out_path.with_name("_".join(parts)), overwrite=overwrite
+                    )
+                else:
+                    warnings.warn(
+                        "Using already unzipped archive with all expected files at"
+                        + f" {out_path}."
+                    )
+                    return out_path
+
+        f.extractall(out_path)
+
+    return out_path
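Editor's note — a minimal usage sketch (not part of the patch; the URL is a hypothetical placeholder): `open_bioimageio_yaml` resolves a URL or path, unzipping packaged resources as needed, and returns sanitized YAML content.

    opened = open_bioimageio_yaml("https://example.com/my-model.bioimageio.zip")
    print(opened.original_file_name, opened.content.get("type"))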
+def write_zip(
+    path: FilePath,
+    content: Mapping[FileName, Union[str, FilePath, Dict[Any, Any]]],
+    *,
+    compression: int,
+    compression_level: int,
+) -> None:
+    """Write a zip archive.
+
+    Args:
+        path: output path to write to.
+        content: dict mapping archive names to local file paths, strings (for text files), or dicts (for YAML files).
+        compression: The numeric constant of compression method.
+        compression_level: Compression level to use when writing files to the archive.
+            See https://docs.python.org/3/library/zipfile.html#zipfile.ZipFile
+
+    """
+    with ZipFile(
+        path, "w", compression=compression, compresslevel=compression_level
+    ) as myzip:
+        for arc_name, file in content.items():
+            if isinstance(file, dict):
+                buf = io.StringIO()
+                write_yaml(file, buf)
+                file = buf.getvalue()
+
+            if isinstance(file, str):
+                myzip.writestr(arc_name, file.encode("utf-8"))
+            else:
+                myzip.write(file, arcname=arc_name)
+
+
+def load_array(source: Union[FileSource, FileDescr]) -> NDArray[Any]:
+    path = download(source).path
+
+    return numpy.load(path, allow_pickle=False)
+
+
+def save_array(path: Path, array: NDArray[Any]) -> None:
+    return numpy.save(path, array, allow_pickle=False)
diff --git a/bioimageio/spec/_internal/license_id.py b/bioimageio/spec/_internal/license_id.py
new file mode 100644
index 000000000..0e3dff689
--- /dev/null
+++ b/bioimageio/spec/_internal/license_id.py
@@ -0,0 +1,24 @@
+from typing import TypeVar
+
+from ._generated_spdx_license_literals import (
+    DeprecatedLicenseId as DeprecatedLicenseIdLiteral,
+)
+from ._generated_spdx_license_literals import LicenseId as LicenseIdLiteral
+from .validated_string import ValidatedString
+
+LicenceT = TypeVar("LicenceT", LicenseIdLiteral, DeprecatedLicenseIdLiteral)
+
+
+class _LicenseId(ValidatedString[LicenceT], frozen=True):
+    def __repr__(self):
+        # don't include full literal in class repr
+        name, *_ = self.__class__.__name__.split("[")
+        return f'{name}("{self.root}")'
+
+
+class DeprecatedLicenseId(_LicenseId[DeprecatedLicenseIdLiteral], frozen=True):
+    pass
+
+
+class LicenseId(_LicenseId[LicenseIdLiteral], frozen=True):
+    pass
diff --git a/bioimageio/spec/_internal/node.py b/bioimageio/spec/_internal/node.py
new file mode 100644
index 000000000..245585e84
--- /dev/null
+++ b/bioimageio/spec/_internal/node.py
@@ -0,0 +1,69 @@
+from __future__ import annotations
+
+from typing import (
+    Any,
+    Dict,
+    Optional,
+    Union,
+)
+
+import pydantic
+from typing_extensions import (
+    Self,
+)
+
+from .validation_context import ValidationContext, validation_context_var
+
+
+class Node(
+    pydantic.BaseModel,
+    extra="forbid",
+    frozen=False,
+    populate_by_name=True,
+    revalidate_instances="never",
+    validate_assignment=True,
+    validate_default=False,
+    validate_return=True,  # TODO: check if False here would bring a speedup and can still be safe
+    # use_attribute_docstrings=True, TODO: use this from future pydantic 2.7
+    # see https://github.com/pydantic/pydantic/issues/5656
+):
+    """Subpart of a resource description"""
+
+    @classmethod
+    def model_validate(
+        cls,
+        obj: Union[Any, Dict[str, Any]],
+        *,
+        strict: Optional[bool] = None,
+        from_attributes: Optional[bool] = None,
+        context: Union[ValidationContext, Dict[str, Any], None] = None,
+    ) -> Self:
+        """Validate a pydantic model instance.
+
+        Args:
+            obj: The object to validate.
+            strict: Whether to raise an exception on invalid fields.
+            from_attributes: Whether to extract data from object attributes.
+            context: Additional context to pass to the validator.
+
+        Raises:
+            ValidationError: If the object failed validation.
+
+        Returns:
+            The validated description instance.
+        """
+        __tracebackhide__ = True
+
+        if context is None:
+            context = validation_context_var.get()
+        elif isinstance(context, dict):
+            context = ValidationContext(**context)
+
+        if isinstance(obj, dict):
+            assert all(isinstance(k, str) for k in obj), obj
+
+        with context:
+            # use validation context as context manager for equal behavior of __init__ and model_validate
+            return super().model_validate(
+                obj, strict=strict, from_attributes=from_attributes
+            )
diff --git a/bioimageio/spec/_internal/packaging_context.py b/bioimageio/spec/_internal/packaging_context.py
new file mode 100644
index 000000000..34e5af466
--- /dev/null
+++ b/bioimageio/spec/_internal/packaging_context.py
@@ -0,0 +1,52 @@
+from __future__ import annotations
+
+from contextvars import ContextVar, Token
+from dataclasses import dataclass, field
+from typing import Dict, List, Optional, Union
+
+from .io_basics import AbsoluteFilePath, FileName
+from .url import HttpUrl
+
+
+@dataclass(frozen=True)
+class PackagingContext:
+    _context_tokens: "List[Token[Optional[PackagingContext]]]" = field(
+        init=False, default_factory=list
+    )
+
+    bioimageio_yaml_file_name: FileName
+
+    file_sources: Dict[FileName, Union[AbsoluteFilePath, HttpUrl]] = field(
+        default_factory=dict
+    )
+    """File sources to include in the packaged resource"""
+
+    def replace(
+        self,
+        *,
+        bioimageio_yaml_file_name: Optional[FileName] = None,
+        file_sources: Optional[Dict[FileName, Union[AbsoluteFilePath, HttpUrl]]] = None,
+    ) -> "PackagingContext":
+        """return a modified copy"""
+        return PackagingContext(
+            bioimageio_yaml_file_name=(
+                self.bioimageio_yaml_file_name
+                if bioimageio_yaml_file_name is None
+                else bioimageio_yaml_file_name
+            ),
+            file_sources=(
+                dict(self.file_sources) if file_sources is None else file_sources
+            ),
+        )
+
+    def __enter__(self):
+        self._context_tokens.append(packaging_context_var.set(self))
+        return self
+
+    def __exit__(self, type, value, traceback):  # type: ignore
+        packaging_context_var.reset(self._context_tokens.pop(-1))
+
+
+packaging_context_var: ContextVar[Optional[PackagingContext]] = ContextVar(
+    "packaging_context_var", default=None
+)
diff --git a/bioimageio/spec/_internal/root_url.py b/bioimageio/spec/_internal/root_url.py
new file mode 100644
index 000000000..f9d28d138
--- /dev/null
+++ b/bioimageio/spec/_internal/root_url.py
@@ -0,0 +1,65 @@
+from __future__ import annotations
+
+from typing import Optional
+from urllib.parse import urlsplit, urlunsplit
+
+import pydantic
+from pydantic import AfterValidator, TypeAdapter
+from typing_extensions import Annotated
+
+from .validated_string import ValidatedString
+
+_http_url_adapter = TypeAdapter(pydantic.HttpUrl)  # pyright: ignore[reportCallIssue]
+
+
+class RootHttpUrl(
+    ValidatedString[
+        Annotated[
+            str,
+            AfterValidator(lambda value: str(_http_url_adapter.validate_python(value))),
+        ]
+    ],
+    frozen=True,
+):
+    """A 'URL folder', possibly an invalid http URL"""
+
+    @property
+    def _url(self):
+        return pydantic.AnyUrl(str(self))
+
+    @property
+    def scheme(self) -> str:
+        return self._url.scheme
+
+    @property
+    def host(self) -> Optional[str]:
+        return self._url.host
+
+    @property
+    def path(self) -> Optional[str]:
+        return self._url.path
+
+    @property
+    def parent(self) -> RootHttpUrl:
+        parsed = urlsplit(str(self))
+        path = list(parsed.path.split("/"))
+        if (
+            parsed.netloc == "zenodo.org"
+            and parsed.path.startswith("/api/records/")
+            and parsed.path.endswith("/content")
+        ):
+            path[-2:-1] = []
+        else:
+            path = path[:-1]
+
+        return RootHttpUrl(
+            urlunsplit(
+                (
+                    parsed.scheme,
+                    parsed.netloc,
+                    "/".join(path),
+                    parsed.query,
+                    parsed.fragment,
+                )
+            )
+        )
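Editor's note — a quick sketch of `RootHttpUrl.parent` (not part of the patch; the URL is a hypothetical placeholder):

    assert str(RootHttpUrl("https://example.com/a/b").parent) == "https://example.com/a"
    # Zenodo ".../api/records/<id>/content" URLs are special-cased above:
    # the record id segment is dropped instead of the trailing "content" marker.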
diff --git a/bioimageio/spec/_internal/types.py b/bioimageio/spec/_internal/types.py
new file mode 100644
index 000000000..f04116c0a
--- /dev/null
+++ b/bioimageio/spec/_internal/types.py
@@ -0,0 +1,124 @@
+from __future__ import annotations
+
+from datetime import datetime
+from keyword import iskeyword
+from typing import Any, Sequence, TypeVar, Union
+
+import annotated_types
+from dateutil.parser import isoparse
+from pydantic import PlainSerializer, RootModel, StringConstraints
+from typing_extensions import Annotated, Literal
+
+from .constants import DOI_REGEX, SI_UNIT_REGEX
+from .field_validation import AfterValidator, BeforeValidator
+from .io import FileSource as FileSource
+from .io import ImportantFileSource as ImportantFileSource
+from .io import PermissiveFileSource as PermissiveFileSource
+from .io import RelativeFilePath as RelativeFilePath
+from .io import Sha256 as Sha256
+from .io_basics import AbsoluteDirectory as AbsoluteDirectory
+from .io_basics import AbsoluteFilePath as AbsoluteFilePath
+from .io_basics import FileName as FileName
+from .license_id import DeprecatedLicenseId as DeprecatedLicenseId
+from .license_id import LicenseId as LicenseId
+from .url import HttpUrl as HttpUrl
+from .validated_string import ValidatedString
+from .version_type import Version as Version
+
+S = TypeVar("S", bound=Sequence[Any])
+NotEmpty = Annotated[S, annotated_types.MinLen(1)]
+
+
+def _validate_identifier(s: str) -> str:
+    if not s.isidentifier():
+        raise ValueError(
+            f"'{s}' is not a valid (Python) identifier, see"
+            + " https://docs.python.org/3/reference/lexical_analysis.html#identifiers"
+            + " for details."
+        )
+
+    return s
+
+
+def _validate_is_not_keyword(s: str) -> str:
+    if iskeyword(s):
+        raise ValueError(f"'{s}' is a Python keyword and not allowed here.")
+
+    return s
+
+
+def _validate_datetime(dt: Union[datetime, str, Any]) -> datetime:
+    if isinstance(dt, datetime):
+        return dt
+    elif isinstance(dt, str):
+        return isoparse(dt)
+
+    raise ValueError(f"'{dt}' not a string or datetime.")
+
+
+def _validate_orcid_id(orcid_id: str):
+    if len(orcid_id) == 19 and all(orcid_id[idx] == "-" for idx in [4, 9, 14]):
+        check = 0
+        for n in orcid_id[:4] + orcid_id[5:9] + orcid_id[10:14] + orcid_id[15:]:
+            # adapted from stdnum.iso7064.mod_11_2.checksum()
+            check = (2 * check + int(10 if n == "X" else n)) % 11
+        if check == 1:
+            return orcid_id  # valid
+
+    raise ValueError(
+        f"'{orcid_id}' is not a valid ORCID iD in hyphenated groups of 4 digits."
+    )
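Editor's note — a worked checksum example (not part of the patch): ISO 7064 mod 11-2 runs over the 15 digits plus the check character, and the running value must end at 1. The well-known ORCID documentation iD passes:

    assert _validate_orcid_id("0000-0002-1825-0097") == "0000-0002-1825-0097"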
+
+
+# TODO follow up on https://github.com/pydantic/pydantic/issues/8964
+# to remove _serialize_datetime
+def _serialize_datetime_json(dt: datetime) -> str:
+    return dt.isoformat()
+
+
+Datetime = RootModel[
+    Annotated[
+        datetime,
+        BeforeValidator(_validate_datetime),
+        PlainSerializer(_serialize_datetime_json, when_used="json-unless-none"),
+    ]
+]
+"""Timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format
+with a few restrictions listed [here](https://docs.python.org/3/library/datetime.html#datetime.datetime.fromisoformat)."""
+
+Doi = ValidatedString[Annotated[str, StringConstraints(pattern=DOI_REGEX)]]
+FormatVersionPlaceholder = Literal["latest", "discover"]
+IdentifierAnno = Annotated[
+    NotEmpty[str],
+    AfterValidator(_validate_identifier),
+    AfterValidator(_validate_is_not_keyword),
+]
+Identifier = ValidatedString[IdentifierAnno]
+LowerCaseIdentifierAnno = Annotated[IdentifierAnno, annotated_types.LowerCase]
+LowerCaseIdentifier = ValidatedString[LowerCaseIdentifierAnno]
+OrcidId = ValidatedString[Annotated[str, AfterValidator(_validate_orcid_id)]]
+SiUnit = ValidatedString[
+    Annotated[
+        str,
+        StringConstraints(min_length=1, pattern=SI_UNIT_REGEX),
+        BeforeValidator(
+            lambda s: (
+                s.replace("×", "·").replace("*", "·").replace(" ", "·")
+                if isinstance(s, str)
+                else s
+            )
+        ),
+    ]
+]
+
+_ResourceIdAnno = Annotated[
+    NotEmpty[str],
+    annotated_types.LowerCase,
+    annotated_types.Predicate(lambda s: "\\" not in s and s[0] != "/" and s[-1] != "/"),
+]
+ResourceId = ValidatedString[_ResourceIdAnno]
+ApplicationId = ValidatedString[_ResourceIdAnno]
+CollectionId = ValidatedString[_ResourceIdAnno]
+DatasetId = ValidatedString[_ResourceIdAnno]
+ModelId = ValidatedString[_ResourceIdAnno]
+NotebookId = ValidatedString[_ResourceIdAnno]
diff --git a/bioimageio/spec/_internal/url.py b/bioimageio/spec/_internal/url.py
new file mode 100644
index 000000000..11728b78e
--- /dev/null
+++ b/bioimageio/spec/_internal/url.py
@@ -0,0 +1,99 @@
+import requests
+import requests.exceptions
+from pydantic import model_validator
+
+from .field_warning import issue_warning
+from .root_url import RootHttpUrl
+from .validation_context import validation_context_var
+
+
+def check_url(url: str) -> None:
+    if url.startswith("https://colab.research.google.com/github/"):
+        # head request for colab returns "Value error, 405: Method Not Allowed"
+        # therefore we check if the source notebook exists at github instead
+        val_url = url.replace(
+            "https://colab.research.google.com/github/", "https://github.com/"
+        )
+    else:
+        val_url = url
+
+    try:
+        response = requests.head(val_url, timeout=(3, 3))
+    except (
+        requests.exceptions.ChunkedEncodingError,
+        requests.exceptions.ContentDecodingError,
+        requests.exceptions.InvalidHeader,
+        requests.exceptions.InvalidJSONError,
+        requests.exceptions.InvalidSchema,
+        requests.exceptions.InvalidURL,
+        requests.exceptions.MissingSchema,
+        requests.exceptions.StreamConsumedError,
+        requests.exceptions.TooManyRedirects,
+        requests.exceptions.UnrewindableBodyError,
+        requests.exceptions.URLRequired,
+    ) as e:
+        raise ValueError(
+            f"Invalid URL '{url}': {e}\nrequest: {e.request}\nresponse: {e.response}"
+        )
+    except requests.RequestException as e:
+        issue_warning(
+            "Failed to validate URL '{value}': {error}\nrequest: {request}\nresponse: {response}",
+            value=url,
+            msg_context={"error": str(e), "response": e.response, "request": e.request},
+        )
+    except Exception as e:
+        issue_warning(
+            "Failed to validate URL '{value}': {error}",
+            value=url,
+            msg_context={"error": str(e)},
+        )
+    else:
+        follow_up_with_get = False
+        if response.status_code == 302:  # found
+            pass
+        elif response.status_code in (301, 303, 308):
+            issue_warning(
+                "URL redirected ({status_code}): consider updating {value} with new"
+                + " location: {location}",
+                value=url,
+                msg_context={
+                    "status_code": response.status_code,
+                    "location": response.headers.get("location"),
+                },
+            )
+        elif response.status_code == 403:  # forbidden
+            follow_up_with_get = True
+            issue_warning(
+                "{status_code}: {reason} {value}",
+                value=url,
+                msg_context={
+                    "status_code": response.status_code,
+                    "reason": response.reason,
+                },
+            )
+        elif response.status_code == 405:
+            issue_warning(
+                "{status_code}: {reason} {value}",
+                value=url,
+                msg_context={
+                    "status_code": response.status_code,
+                    "reason": response.reason,
+                },
+            )
+        elif response.status_code != 200:
+            raise ValueError(f"{response.status_code}: {response.reason} {url}")
+
+        if follow_up_with_get:
+            pass
+            # TODO follow up forbidden head request with get
+            # motivating example: 403: Forbidden https://elifesciences.org/articles/57613
+
+
+class HttpUrl(RootHttpUrl, frozen=True):
+    @model_validator(mode="after")
+    def _check_url(self):
+        if not validation_context_var.get().perform_io_checks:
+            return self
+
+        check_url(str(self))
+        return self
diff --git a/bioimageio/spec/_internal/utils.py b/bioimageio/spec/_internal/utils.py
new file mode 100644
index 000000000..3275c77cb
--- /dev/null
+++ b/bioimageio/spec/_internal/utils.py
@@ -0,0 +1,118 @@
+from __future__ import annotations
+
+import sys
+from functools import wraps
+from inspect import signature
+from pathlib import Path
+from typing import (
+    Callable,
+    Dict,
+    Set,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+)
+
+from typing_extensions import ParamSpec
+
+K = TypeVar("K")
+V = TypeVar("V")
+NestedDict = Dict[K, "NestedDict[K, V] | V"]
+
+
+if sys.version_info < (3, 9):
+
+    def files(package_name: str):
+        assert package_name == "bioimageio.spec", package_name
+        return Path(__file__).parent.parent
+
+else:
+    from importlib.resources import files as files
+
+
+def nest_dict(flat_dict: Dict[Tuple[K, ...], V]) -> NestedDict[K, V]:
+    res: NestedDict[K, V] = {}
+    for k, v in flat_dict.items():
+        node: Union[Dict[K, Union[NestedDict[K, V], V]], NestedDict[K, V]] = res
+        for kk in k[:-1]:
+            if not isinstance(node, dict):
+                raise ValueError(f"nesting level collision for flat key {k} at {kk}")
+
+            d: NestedDict[K, V] = {}
+            node = node.setdefault(kk, d)  # type: ignore
+
+        if not isinstance(node, dict):
+            raise ValueError(f"nesting level collision for flat key {k}")
+
+        node[k[-1]] = v
+
+    return res
+
+
+FirstK = TypeVar("FirstK")
+
+
+def nest_dict_with_narrow_first_key(
+    flat_dict: Dict[Tuple[K, ...], V], first_k: Type[FirstK]
+) -> Dict[FirstK, "NestedDict[K, V] | V"]:
+    """Convenience function to annotate a special version of a NestedDict.
+    Root level keys are of a narrower type than the nested keys;
+    if they are not, a ValueError is raised.
+    """
+    nested = nest_dict(flat_dict)
+    invalid_first_keys = [k for k in nested if not isinstance(k, first_k)]
+    if invalid_first_keys:
+        raise ValueError(f"Invalid root level keys: {invalid_first_keys}")
+
+    return nested  # type: ignore
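Editor's note — a concrete example of `nest_dict` (not part of the patch): tuple keys become nested mappings, sharing prefixes along the way.

    assert nest_dict({("a", "b"): 1, ("a", "c"): 2, ("d",): 3}) == {
        "a": {"b": 1, "c": 2},
        "d": 3,
    }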
+def unindent(text: str, ignore_first_line: bool = False):
+    """remove the minimum count of leading spaces from each line.
+
+    Args:
+        text: indented text
+        ignore_first_line: allows doc strings to be unindented correctly
+    """
+    first = int(ignore_first_line)
+    lines = text.split("\n")
+    filled_lines = [line for line in lines[first:] if line]
+    if len(filled_lines) < 2:
+        return "\n".join(line.strip() for line in lines)
+
+    indent = min(len(line) - len(line.lstrip(" ")) for line in filled_lines)
+    return "\n".join(lines[:first] + [line[indent:] for line in lines[first:]])
+
+
+T = TypeVar("T")
+P = ParamSpec("P")
+
+
+def assert_all_params_set_explicitly(fn: Callable[P, T]) -> Callable[P, T]:
+    @wraps(fn)
+    def wrapper(*args: P.args, **kwargs: P.kwargs):
+        n_args = len(args)
+        missing: Set[str] = set()
+
+        for p in signature(fn).parameters.values():
+            if p.kind == p.POSITIONAL_ONLY:
+                if n_args == 0:
+                    missing.add(p.name)
+                else:
+                    n_args -= 1  # 'use' positional arg
+            elif p.kind == p.POSITIONAL_OR_KEYWORD:
+                if n_args == 0:
+                    if p.name not in kwargs:
+                        missing.add(p.name)
+                else:
+                    n_args -= 1  # 'use' positional arg
+            elif p.kind in (p.VAR_POSITIONAL, p.VAR_KEYWORD):
+                pass
+            elif p.kind == p.KEYWORD_ONLY:
+                if p.name not in kwargs:
+                    missing.add(p.name)
+
+        assert not missing, f"parameters {missing} of {fn} are not set explicitly"
+
+        return fn(*args, **kwargs)
+
+    return wrapper
diff --git a/bioimageio/spec/_internal/validated_string.py b/bioimageio/spec/_internal/validated_string.py
new file mode 100644
index 000000000..f635c69c8
--- /dev/null
+++ b/bioimageio/spec/_internal/validated_string.py
@@ -0,0 +1,10 @@
+from typing import TypeVar
+
+from pydantic import RootModel
+
+S = TypeVar("S", bound=str)
+
+
+class ValidatedString(RootModel[S], frozen=True):
+    def __str__(self) -> str:
+        return self.root
diff --git a/bioimageio/spec/_internal/validation_context.py b/bioimageio/spec/_internal/validation_context.py
new file mode 100644
index 000000000..b20e540af
--- /dev/null
+++ b/bioimageio/spec/_internal/validation_context.py
@@ -0,0 +1,98 @@
+from __future__ import annotations
+
+from contextvars import ContextVar, Token
+from dataclasses import dataclass, field
+from pathlib import Path
+from typing import List, Optional, Union
+from urllib.parse import urlsplit, urlunsplit
+
+from pydantic import DirectoryPath
+
+from ._settings import settings
+from .io_basics import AbsoluteDirectory, FileName
+from .root_url import RootHttpUrl
+from .warning_levels import WarningLevel
+
+
+@dataclass(frozen=True)
+class ValidationContext:
+    _context_tokens: "List[Token[ValidationContext]]" = field(
+        init=False, default_factory=list
+    )
+
+    root: Union[RootHttpUrl, AbsoluteDirectory] = Path()
+    """url/directory serving as base to resolve any relative file paths"""
+
+    warning_level: WarningLevel = 50
+    """raise warnings of severity `s` as validation errors if `s >= warning_level`"""
+
+    log_warnings: bool = settings.log_warnings
+    """if `True` log warnings that are not raised to the console"""
+
+    file_name: Optional[FileName] = None
+    """file name of the bioimageio YAML file"""
+
+    perform_io_checks: bool = settings.perform_io_checks
+    """whether or not to perform validation that requires file io,
+    e.g. downloading remote files.
+ + Existence of local absolute file paths is still being checked.""" + + def replace( + self, + root: Optional[Union[RootHttpUrl, DirectoryPath]] = None, + warning_level: Optional[WarningLevel] = None, + log_warnings: Optional[bool] = None, + file_name: Optional[str] = None, + perform_io_checks: Optional[bool] = None, + ) -> "ValidationContext": + return ValidationContext( + root=self.root if root is None else root, + warning_level=( + self.warning_level if warning_level is None else warning_level + ), + log_warnings=self.log_warnings if log_warnings is None else log_warnings, + file_name=self.file_name if file_name is None else file_name, + perform_io_checks=( + self.perform_io_checks + if perform_io_checks is None + else perform_io_checks + ), + ) + + def __enter__(self): + self._context_tokens.append(validation_context_var.set(self)) + return self + + def __exit__(self, type, value, traceback): # type: ignore + validation_context_var.reset(self._context_tokens.pop(-1)) + + @property + def source_name(self) -> str: + if self.file_name is None: + return "in-memory" + else: + try: + if isinstance(self.root, Path): + source = (self.root / self.file_name).absolute() + else: + parsed = urlsplit(str(self.root)) + path = list(parsed.path.strip("/").split("/")) + [self.file_name] + source = urlunsplit( + ( + parsed.scheme, + parsed.netloc, + "/".join(path), + parsed.query, + parsed.fragment, + ) + ) + except ValueError: + return self.file_name + else: + return str(source) + + +validation_context_var: ContextVar[ValidationContext] = ContextVar( + "validation_context_var", default=ValidationContext() +) diff --git a/bioimageio/spec/_internal/version_type.py b/bioimageio/spec/_internal/version_type.py new file mode 100644 index 000000000..9acb64841 --- /dev/null +++ b/bioimageio/spec/_internal/version_type.py @@ -0,0 +1,192 @@ +from typing import Any, Optional, Tuple, Union + +import packaging.version +from pydantic import PrivateAttr, RootModel + + +class Version(RootModel[Union[str, int, float]]): + """wraps a packaging.version.Version instance for validation in pydantic models""" + + _version: packaging.version.Version = PrivateAttr() + + def __str__(self): + return str(self._version) + + def model_post_init(self, __context: Any) -> None: + self._version = packaging.version.Version(str(self.root)) + return super().model_post_init(__context) + + # the properties below are adopted from and mirror properties of packaging.version.Version + @property + def epoch(self) -> int: + """The epoch of the version. + + >>> Version("2.0.0").epoch + 0 + >>> Version("1!2.0.0").epoch + 1 + """ + return self._version.epoch + + @property + def release(self) -> Tuple[int, ...]: + """The components of the "release" segment of the version. + + >>> Version("1.2.3").release + (1, 2, 3) + >>> Version("2.0.0").release + (2, 0, 0) + >>> Version("1!2.0.0.post0").release + (2, 0, 0) + + Includes trailing zeroes but not the epoch or any pre-release / development / + post-release suffixes. + """ + return self._version.release + + @property + def pre(self) -> Optional[Tuple[str, int]]: + """The pre-release segment of the version. + + >>> print(Version("1.2.3").pre) + None + >>> Version("1.2.3a1").pre + ('a', 1) + >>> Version("1.2.3b1").pre + ('b', 1) + >>> Version("1.2.3rc1").pre + ('rc', 1) + """ + return self._version.pre + + @property + def post(self) -> Optional[int]: + """The post-release number of the version. 
+ + >>> print(Version("1.2.3").post) + None + >>> Version("1.2.3.post1").post + 1 + """ + return self._version.post + + @property + def dev(self) -> Optional[int]: + """The development number of the version. + + >>> print(Version("1.2.3").dev) + None + >>> Version("1.2.3.dev1").dev + 1 + """ + return self._version.dev + + @property + def local(self) -> Optional[str]: + """The local version segment of the version. + + >>> print(Version("1.2.3").local) + None + >>> Version("1.2.3+abc").local + 'abc' + """ + return self._version.local + + @property + def public(self) -> str: + """The public portion of the version. + + >>> Version("1.2.3").public + '1.2.3' + >>> Version("1.2.3+abc").public + '1.2.3' + >>> Version("1.2.3+abc.dev1").public + '1.2.3' + """ + return self._version.public + + @property + def base_version(self) -> str: + """The "base version" of the version. + + >>> Version("1.2.3").base_version + '1.2.3' + >>> Version("1.2.3+abc").base_version + '1.2.3' + >>> Version("1!1.2.3+abc.dev1").base_version + '1!1.2.3' + + The "base version" is the public version of the project without any pre or post + release markers. + """ + return self._version.base_version + + @property + def is_prerelease(self) -> bool: + """Whether this version is a pre-release. + + >>> Version("1.2.3").is_prerelease + False + >>> Version("1.2.3a1").is_prerelease + True + >>> Version("1.2.3b1").is_prerelease + True + >>> Version("1.2.3rc1").is_prerelease + True + >>> Version("1.2.3dev1").is_prerelease + True + """ + return self._version.is_prerelease + + @property + def is_postrelease(self) -> bool: + """Whether this version is a post-release. + + >>> Version("1.2.3").is_postrelease + False + >>> Version("1.2.3.post1").is_postrelease + True + """ + return self._version.is_postrelease + + @property + def is_devrelease(self) -> bool: + """Whether this version is a development release. + + >>> Version("1.2.3").is_devrelease + False + >>> Version("1.2.3.dev1").is_devrelease + True + """ + return self._version.is_devrelease + + @property + def major(self) -> int: + """The first item of :attr:`release` or ``0`` if unavailable. + + >>> Version("1.2.3").major + 1 + """ + return self._version.major + + @property + def minor(self) -> int: + """The second item of :attr:`release` or ``0`` if unavailable. + + >>> Version("1.2.3").minor + 2 + >>> Version("1").minor + 0 + """ + return self._version.minor + + @property + def micro(self) -> int: + """The third item of :attr:`release` or ``0`` if unavailable. + + >>> Version("1.2.3").micro + 3 + >>> Version("1").micro + 0 + """ + return self._version.micro diff --git a/bioimageio/spec/_internal/warning_levels.py b/bioimageio/spec/_internal/warning_levels.py new file mode 100644 index 000000000..36bc49615 --- /dev/null +++ b/bioimageio/spec/_internal/warning_levels.py @@ -0,0 +1,18 @@ +from typing import Literal + +WarningSeverity = Literal[20, 30, 35] +WarningLevel = Literal[WarningSeverity, 50] +"""With warning level x validation warnings of severity >=x are raised. 
+Highest warning level 50/error does not raise any validation warnings (only validation errors)."""
+
+ERROR, ERROR_NAME = 50, "error"
+"""A warning of the error level is always raised (equivalent to a validation error)"""
+
+ALERT, ALERT_NAME = 35, "alert"
+"""no ALERT nor ERROR -> RDF is worry-free"""
+
+WARNING, WARNING_NAME = 30, "warning"
+"""no WARNING nor ALERT nor ERROR -> RDF is watertight"""
+
+INFO, INFO_NAME = 20, "info"
+"""info warnings are about purely cosmetic issues, etc."""
diff --git a/bioimageio/spec/_io.py b/bioimageio/spec/_io.py
new file mode 100644
index 000000000..31b7c157f
--- /dev/null
+++ b/bioimageio/spec/_io.py
@@ -0,0 +1,58 @@
+from typing import Literal, TextIO, Union, cast
+
+from pydantic import FilePath, NewPath
+
+from ._description import (
+    DISCOVER,
+    InvalidDescr,
+    ResourceDescr,
+    build_description,
+    dump_description,
+)
+from ._internal.common_nodes import ResourceDescrBase
+from ._internal.io import BioimageioYamlContent, YamlValue
+from ._internal.io_utils import open_bioimageio_yaml, write_yaml
+from ._internal.validation_context import ValidationContext
+from .common import PermissiveFileSource
+from .summary import ValidationSummary
+
+
+def load_description(
+    source: PermissiveFileSource,
+    /,
+    *,
+    format_version: Union[Literal["discover"], Literal["latest"], str] = DISCOVER,
+) -> Union[ResourceDescr, InvalidDescr]:
+    opened = open_bioimageio_yaml(source)
+
+    return build_description(
+        opened.content,
+        context=ValidationContext(
+            root=opened.original_root, file_name=opened.original_file_name
+        ),
+        format_version=format_version,
+    )
+
+
+def save_bioimageio_yaml_only(
+    rd: Union[ResourceDescr, BioimageioYamlContent, InvalidDescr],
+    /,
+    file: Union[NewPath, FilePath, TextIO],
+):
+    if isinstance(rd, ResourceDescrBase):
+        content = dump_description(rd)
+    else:
+        content = rd
+
+    write_yaml(cast(YamlValue, content), file)
+
+
+def load_description_and_validate_format_only(
+    source: PermissiveFileSource,
+    /,
+    *,
+    format_version: Union[Literal["discover"], Literal["latest"], str] = DISCOVER,
+) -> ValidationSummary:
+    rd = load_description(source, format_version=format_version)
+    assert rd.validation_summary is not None
+    return rd.validation_summary
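Editor's note — a minimal usage sketch (not part of the patch; the path is a hypothetical placeholder):

    from pathlib import Path

    descr = load_description("my-model/rdf.yaml")
    if isinstance(descr, InvalidDescr):
        print(descr.validation_summary)
    else:
        save_bioimageio_yaml_only(descr, Path("roundtrip.rdf.yaml"))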
diff --git a/bioimageio/spec/_package.py b/bioimageio/spec/_package.py
new file mode 100644
index 000000000..f35674098
--- /dev/null
+++ b/bioimageio/spec/_package.py
@@ -0,0 +1,259 @@
+import collections.abc
+import re
+import shutil
+from pathlib import Path
+from tempfile import NamedTemporaryFile, mkdtemp
+from typing import Dict, Literal, Optional, Sequence, Union, cast
+from zipfile import ZIP_DEFLATED
+
+from pydantic import DirectoryPath, FilePath, NewPath
+
+from ._description import InvalidDescr, ResourceDescr, build_description
+from ._internal.common_nodes import ResourceDescrBase
+from ._internal.io import (
+    BioimageioYamlContent,
+    BioimageioYamlSource,
+    YamlValue,
+    download,
+    ensure_is_valid_rdf_name,
+)
+from ._internal.io_basics import BIOIMAGEIO_YAML, AbsoluteFilePath, FileName
+from ._internal.io_utils import open_bioimageio_yaml, write_yaml, write_zip
+from ._internal.packaging_context import PackagingContext
+from ._internal.url import HttpUrl
+from ._internal.validation_context import validation_context_var
+from ._io import load_description
+from .model.v0_4 import ModelDescr as ModelDescr04
+from .model.v0_4 import WeightsFormat
+from .model.v0_5 import ModelDescr as ModelDescr05
+
+
+def get_os_friendly_file_name(name: str) -> str:
+    return re.sub(r"\W+|^(?=\d)", "_", name)
+
+
+def get_resource_package_content(
+    rd: ResourceDescr,
+    /,
+    *,
+    bioimageio_yaml_file_name: FileName = BIOIMAGEIO_YAML,
+    weights_priority_order: Optional[Sequence[WeightsFormat]] = None,  # model only
+) -> Dict[FileName, Union[HttpUrl, AbsoluteFilePath, BioimageioYamlContent]]:
+    """
+    Args:
+        rd: resource description
+        bioimageio_yaml_file_name: RDF file name
+        # for model resources only:
+        weights_priority_order: If given, only the first weights format present in the model is included.
+            If none of the prioritized weights formats is found a ValueError is raised.
+    """
+    os_friendly_name = get_os_friendly_file_name(rd.name)
+    bioimageio_yaml_file_name = bioimageio_yaml_file_name.format(
+        name=os_friendly_name, type=rd.type
+    )
+
+    bioimageio_yaml_file_name = ensure_is_valid_rdf_name(bioimageio_yaml_file_name)
+    content: Dict[FileName, Union[HttpUrl, AbsoluteFilePath]] = {}
+    with PackagingContext(
+        bioimageio_yaml_file_name=bioimageio_yaml_file_name, file_sources=content
+    ):
+        rdf_content: BioimageioYamlContent = rd.model_dump(
+            mode="json", exclude_unset=True
+        )
+
+    _ = rdf_content.pop("rdf_source", None)
+
+    if weights_priority_order is not None and isinstance(
+        rd, (ModelDescr04, ModelDescr05)
+    ):
+        # select single weights entry
+        assert isinstance(rdf_content["weights"], dict), type(rdf_content["weights"])
+        for wf in weights_priority_order:
+            w = rdf_content["weights"].get(wf)
+            if w is not None:
+                break
+        else:
+            raise ValueError(
+                "None of the weight formats in `weights_priority_order` is present in"
+                + " the given model."
+            )
+
+        assert isinstance(w, dict), type(w)
+        _ = w.pop("parent", None)
+        rdf_content["weights"] = {wf: w}
+        parent = rdf_content.pop("id", None)
+        parent_version = rdf_content.pop("version", None)
+        if parent is not None:
+            rdf_content["parent"] = {"id": parent, "version": parent_version}
+
+        with validation_context_var.get().replace(
+            root=rd.root, file_name=bioimageio_yaml_file_name
+        ):
+            rd_slim = build_description(rdf_content)
+
+        assert not isinstance(
+            rd_slim, InvalidDescr
+        ), rd_slim.validation_summary.format()
+        # repackage without other weights entries
+        return get_resource_package_content(
+            rd_slim, bioimageio_yaml_file_name=bioimageio_yaml_file_name
+        )
+
+    return {**content, bioimageio_yaml_file_name: rdf_content}
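Editor's note — a short sketch (not part of the patch; `descr` stands for a previously loaded, valid resource description):

    package = get_resource_package_content(descr)
    assert BIOIMAGEIO_YAML in package  # the RDF content itself is always included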
+def _prepare_resource_package(
+    source: Union[BioimageioYamlSource, ResourceDescr],
+    /,
+    *,
+    weights_priority_order: Optional[Sequence[WeightsFormat]] = None,
+) -> Dict[FileName, Union[FilePath, BioimageioYamlContent]]:
+    """Prepare to package a resource description; downloads all required files.
+
+    Args:
+        source: A bioimage.io resource description (as file, raw YAML content or description class)
+        weights_priority_order: If given, only the first weights format present in the model is included.
+            If none of the prioritized weights formats is found a ValueError is raised.
+    """
+    context = validation_context_var.get()
+    bioimageio_yaml_file_name = context.file_name
+    if isinstance(source, ResourceDescrBase):
+        descr = source
+    elif isinstance(source, dict):
+        descr = build_description(source)
+    else:
+        opened = open_bioimageio_yaml(source)
+        bioimageio_yaml_file_name = opened.original_file_name
+        context = context.replace(
+            root=opened.original_root, file_name=opened.original_file_name
+        )
+        with context:
+            descr = build_description(opened.content)
+
+    if isinstance(descr, InvalidDescr):
+        raise ValueError(f"{source} is invalid: {descr.validation_summary}")
+
+    with context:
+        package_content = get_resource_package_content(
+            descr,
+            bioimageio_yaml_file_name=bioimageio_yaml_file_name or BIOIMAGEIO_YAML,
+            weights_priority_order=weights_priority_order,
+        )
+
+    local_package_content: Dict[FileName, Union[FilePath, BioimageioYamlContent]] = {}
+    for k, v in package_content.items():
+        if not isinstance(v, collections.abc.Mapping):
+            v = download(v).path
+
+        local_package_content[k] = v
+
+    return local_package_content
+
+
+def save_bioimageio_package_as_folder(
+    source: Union[BioimageioYamlSource, ResourceDescr],
+    /,
+    *,
+    output_path: Union[NewPath, DirectoryPath, None] = None,
+    weights_priority_order: Optional[  # model only
+        Sequence[
+            Literal[
+                "keras_hdf5",
+                "onnx",
+                "pytorch_state_dict",
+                "tensorflow_js",
+                "tensorflow_saved_model_bundle",
+                "torchscript",
+            ]
+        ]
+    ] = None,
+) -> DirectoryPath:
+    """Write the content of a bioimage.io resource package to a folder.
+
+    Args:
+        source: bioimageio resource description
+        output_path: file path to write package to
+        weights_priority_order: If given, only the first weights format present in the model is included.
+            If none of the prioritized weights formats is found a ValueError is raised.
+
+    Returns:
+        directory path to bioimageio package folder
+    """
+    package_content = _prepare_resource_package(
+        source,
+        weights_priority_order=weights_priority_order,
+    )
+    if output_path is None:
+        output_path = Path(mkdtemp())
+    else:
+        output_path = Path(output_path)
+
+    output_path.mkdir(exist_ok=True, parents=True)
+    for name, source in package_content.items():
+        if isinstance(source, collections.abc.Mapping):
+            write_yaml(cast(YamlValue, source), output_path / name)
+        else:
+            shutil.copy(source, output_path / name)
+
+    return output_path
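Editor's note — a minimal usage sketch (not part of the patch; the URL and paths are hypothetical placeholders):

    folder = save_bioimageio_package_as_folder(
        "https://example.com/my-model/rdf.yaml", output_path=Path("my_package")
    )
    print(sorted(p.name for p in folder.iterdir()))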
+def save_bioimageio_package(
+    source: Union[BioimageioYamlSource, ResourceDescr],
+    /,
+    *,
+    compression: int = ZIP_DEFLATED,
+    compression_level: int = 1,
+    output_path: Union[NewPath, FilePath, None] = None,
+    weights_priority_order: Optional[  # model only
+        Sequence[
+            Literal[
+                "keras_hdf5",
+                "onnx",
+                "pytorch_state_dict",
+                "tensorflow_js",
+                "tensorflow_saved_model_bundle",
+                "torchscript",
+            ]
+        ]
+    ] = None,
+) -> FilePath:
+    """Package a bioimageio resource as a zip file.
+
+    Args:
+        source: bioimageio resource description
+        compression: The numeric constant of compression method.
+        compression_level: Compression level to use when writing files to the archive.
+            See https://docs.python.org/3/library/zipfile.html#zipfile.ZipFile
+        output_path: file path to write package to
+        weights_priority_order: If given, only the first weights format present in the model is included.
+            If none of the prioritized weights formats is found a ValueError is raised.
+
+    Returns:
+        path to zipped bioimageio package
+    """
+    package_content = _prepare_resource_package(
+        source,
+        weights_priority_order=weights_priority_order,
+    )
+    if output_path is None:
+        output_path = Path(
+            NamedTemporaryFile(suffix=".bioimageio.zip", delete=False).name
+        )
+    else:
+        output_path = Path(output_path)
+
+    write_zip(
+        output_path,
+        package_content,
+        compression=compression,
+        compression_level=compression_level,
+    )
+    if isinstance((exported := load_description(output_path)), InvalidDescr):
+        raise ValueError(
+            f"Exported package '{output_path}' is invalid:"
+            + f" {exported.validation_summary}"
+        )
+
+    return output_path
diff --git a/bioimageio/spec/application/__init__.py b/bioimageio/spec/application/__init__.py
new file mode 100644
index 000000000..37cee006a
--- /dev/null
+++ b/bioimageio/spec/application/__init__.py
@@ -0,0 +1,20 @@
+# autogen: start
+"""
+implementations of all released minor versions are available in submodules:
+- application v0_2: `bioimageio.spec.application.v0_2.ApplicationDescr` [user documentation](../../../user_docs/application_descr_v0-2.md)
+- application v0_3: `bioimageio.spec.application.v0_3.ApplicationDescr` [user documentation](../../../user_docs/application_descr_v0-3.md)
+"""
+from typing import Union
+
+from pydantic import Discriminator
+from typing_extensions import Annotated
+
+from .v0_2 import ApplicationDescr as ApplicationDescr_v0_2
+from .v0_3 import ApplicationDescr as ApplicationDescr
+from .v0_3 import ApplicationDescr as ApplicationDescr_v0_3
+
+AnyApplicationDescr = Annotated[
+    Union[ApplicationDescr_v0_2, ApplicationDescr_v0_3], Discriminator("format_version")
+]
+"""Union of any released application description"""
+# autogen: stop
diff --git a/bioimageio/spec/application/v0_2.py b/bioimageio/spec/application/v0_2.py
new file mode 100644
index 000000000..83d168fd4
--- /dev/null
+++ b/bioimageio/spec/application/v0_2.py
@@ -0,0 +1,48 @@
+from typing import Literal, Optional
+
+from pydantic import Field
+from typing_extensions import Annotated
+
+from .._internal.common_nodes import Node
+from .._internal.io_basics import AbsoluteFilePath as AbsoluteFilePath
+from .._internal.types import ApplicationId as ApplicationId
+from .._internal.types import ImportantFileSource
+from .._internal.url import HttpUrl as HttpUrl
+from ..generic.v0_2 import AttachmentsDescr as AttachmentsDescr
+from ..generic.v0_2 import Author as Author
+from ..generic.v0_2 import BadgeDescr as BadgeDescr
+from ..generic.v0_2 import CiteEntry as CiteEntry
+from ..generic.v0_2 import Doi as Doi
+from ..generic.v0_2 import GenericDescrBase
+from ..generic.v0_2 import LinkedResource as LinkedResource
+from ..generic.v0_2 import Maintainer as Maintainer
+from ..generic.v0_2 import OrcidId as OrcidId
+from ..generic.v0_2 import RelativeFilePath as RelativeFilePath
+from ..generic.v0_2 import ResourceId as ResourceId
+from ..generic.v0_2 import Uploader as Uploader
+from ..generic.v0_2 import Version as Version
+
+
+class ApplicationDescr(GenericDescrBase, title="bioimage.io application specification"):
+    """Bioimage.io description of an application."""
+
+    type: Literal["application"] = "application"
+
+    id: Optional[ApplicationId] = None
+    """Model zoo (bioimage.io) wide, unique identifier (assigned by bioimage.io)"""
+
+    source: Annotated[
+        Optional[ImportantFileSource],
+        Field(description="URL or path to the source of the application"),
+    ] = None
+    """The primary source of the application"""
+
+
+class LinkedApplication(Node):
+    """Reference
to a bioimage.io application.""" + + id: ApplicationId + """A valid application `id` from the bioimage.io collection.""" + + version_number: Optional[int] = None + """version number (n-th published version, not the semantic version) of linked application""" diff --git a/bioimageio/spec/application/v0_3.py b/bioimageio/spec/application/v0_3.py new file mode 100644 index 000000000..38054021a --- /dev/null +++ b/bioimageio/spec/application/v0_3.py @@ -0,0 +1,52 @@ +from typing import Literal, Optional + +from pydantic import Field +from typing_extensions import Annotated + +from .._internal.common_nodes import Node +from .._internal.io import FileDescr as FileDescr +from .._internal.io import Sha256 as Sha256 +from .._internal.io_basics import AbsoluteFilePath as AbsoluteFilePath +from .._internal.types import ApplicationId as ApplicationId +from .._internal.types import ImportantFileSource +from .._internal.url import HttpUrl as HttpUrl +from ..generic.v0_3 import Author as Author +from ..generic.v0_3 import BadgeDescr as BadgeDescr +from ..generic.v0_3 import CiteEntry as CiteEntry +from ..generic.v0_3 import Doi as Doi +from ..generic.v0_3 import GenericDescrBase +from ..generic.v0_3 import LinkedResource as LinkedResource +from ..generic.v0_3 import Maintainer as Maintainer +from ..generic.v0_3 import OrcidId as OrcidId +from ..generic.v0_3 import RelativeFilePath as RelativeFilePath +from ..generic.v0_3 import ResourceId as ResourceId +from ..generic.v0_3 import Uploader as Uploader +from ..generic.v0_3 import Version as Version + + +class ApplicationDescr(GenericDescrBase, title="bioimage.io application specification"): + """Bioimage.io description of an application.""" + + type: Literal["application"] = "application" + + id: Optional[ApplicationId] = None + """Model zoo (bioimage.io) wide, unique identifier (assigned by bioimage.io)""" + + parent: Optional[ApplicationId] = None + """The description from which this one is derived""" + + source: Annotated[ + Optional[ImportantFileSource], + Field(description="URL or path to the source of the application"), + ] = None + """The primary source of the application""" + + +class LinkedApplication(Node): + """Reference to a bioimage.io application.""" + + id: ApplicationId + """A valid application `id` from the bioimage.io collection.""" + + version_number: int + """version number (n-th published version, not the semantic version) of linked application""" diff --git a/bioimageio/spec/collection/__init__.py b/bioimageio/spec/collection/__init__.py index d8a7cc745..c44985976 100644 --- a/bioimageio/spec/collection/__init__.py +++ b/bioimageio/spec/collection/__init__.py @@ -1,14 +1,20 @@ -from . import v0_2 - -# autogen: start -from . 
import converters, raw_nodes, schema, utils
-from .raw_nodes import FormatVersion
-
-try:
-    from typing import get_args
-except ImportError:
-    from typing_extensions import get_args  # type: ignore
-
-format_version = get_args(FormatVersion)[-1]
-
-# autogen: stop
+# autogen: start
+"""
+implementations of all released minor versions are available in submodules:
+- collection v0_2: `bioimageio.spec.collection.v0_2.CollectionDescr` [user documentation](../../../user_docs/collection_descr_v0-2.md)
+- collection v0_3: `bioimageio.spec.collection.v0_3.CollectionDescr` [user documentation](../../../user_docs/collection_descr_v0-3.md)
+"""
+from typing import Union
+
+from pydantic import Discriminator
+from typing_extensions import Annotated
+
+from .v0_2 import CollectionDescr as CollectionDescr_v0_2
+from .v0_3 import CollectionDescr as CollectionDescr
+from .v0_3 import CollectionDescr as CollectionDescr_v0_3
+
+AnyCollectionDescr = Annotated[
+    Union[CollectionDescr_v0_2, CollectionDescr_v0_3], Discriminator("format_version")
+]
+"""Union of any released collection description"""
+# autogen: stop
diff --git a/bioimageio/spec/collection/converters.py b/bioimageio/spec/collection/converters.py
deleted file mode 100644
index b296f1351..000000000
--- a/bioimageio/spec/collection/converters.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# Auto-generated by generate_passthrough_modules.py - do not modify
-
-from .v0_2.converters import *
diff --git a/bioimageio/spec/collection/raw_nodes.py b/bioimageio/spec/collection/raw_nodes.py
deleted file mode 100644
index bb25c3c1f..000000000
--- a/bioimageio/spec/collection/raw_nodes.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# Auto-generated by generate_passthrough_modules.py - do not modify
-
-from .v0_2.raw_nodes import *
diff --git a/bioimageio/spec/collection/schema.py b/bioimageio/spec/collection/schema.py
deleted file mode 100644
index 9a6b0a4b9..000000000
--- a/bioimageio/spec/collection/schema.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# Auto-generated by generate_passthrough_modules.py - do not modify
-
-from .v0_2.schema import *
diff --git a/bioimageio/spec/collection/utils.py b/bioimageio/spec/collection/utils.py
deleted file mode 100644
index 1086c2eb1..000000000
--- a/bioimageio/spec/collection/utils.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# Auto-generated by generate_passthrough_modules.py - do not modify
-
-from .v0_2.utils import *
diff --git a/bioimageio/spec/collection/v0_2.py b/bioimageio/spec/collection/v0_2.py
new file mode 100644
index 000000000..7994eb14f
--- /dev/null
+++ b/bioimageio/spec/collection/v0_2.py
@@ -0,0 +1,295 @@
+import collections.abc
+from functools import partial
+from types import MappingProxyType
+from typing import Any, Dict, List, Literal, Optional, Union, get_args
+
+from pydantic import (
+    PrivateAttr,
+    model_validator,
+)
+from typing_extensions import Self
+
+from .._build_description import build_description_impl, get_rd_class_impl
+from .._internal.common_nodes import InvalidDescr, Node
+from .._internal.field_warning import issue_warning
+from .._internal.io import BioimageioYamlContent, YamlValue
+from .._internal.io_basics import AbsoluteFilePath as AbsoluteFilePath
+from .._internal.io_utils import open_bioimageio_yaml
+from .._internal.types import ApplicationId as ApplicationId
+from .._internal.types import CollectionId as CollectionId
+from .._internal.types import DatasetId as DatasetId
+from .._internal.types import ModelId as ModelId
+from .._internal.types import NotebookId as NotebookId
+from .._internal.types import NotEmpty
+from .._internal.url import HttpUrl as HttpUrl
+from .._internal.validation_context import validation_context_var
+from .._internal.warning_levels import ALERT
+from ..application import ApplicationDescr_v0_2, ApplicationDescr_v0_3
+from ..dataset import DatasetDescr_v0_2, DatasetDescr_v0_3
+from ..generic import GenericDescr_v0_2, GenericDescr_v0_3
+from ..generic.v0_2 import AttachmentsDescr as AttachmentsDescr
+from ..generic.v0_2 import Author as Author
+from ..generic.v0_2 import BadgeDescr as BadgeDescr
+from ..generic.v0_2 import CiteEntry as CiteEntry
+from ..generic.v0_2 import Doi as Doi
+from ..generic.v0_2 import FileSource, GenericDescrBase
+from ..generic.v0_2 import LinkedResource as LinkedResource
+from ..generic.v0_2 import Maintainer as Maintainer
+from ..generic.v0_2 import OrcidId as OrcidId
+from ..generic.v0_2 import RelativeFilePath as RelativeFilePath
+from ..generic.v0_2 import ResourceId as ResourceId
+from ..generic.v0_2 import Uploader as Uploader
+from ..generic.v0_2 import Version as Version
+from ..model import ModelDescr_v0_4, ModelDescr_v0_5
+from ..notebook import NotebookDescr_v0_2, NotebookDescr_v0_3
+
+EntryDescr = Union[
+    ApplicationDescr_v0_2,
+    DatasetDescr_v0_2,
+    GenericDescr_v0_2,
+    ModelDescr_v0_4,
+    NotebookDescr_v0_2,
+]
+
+_ENTRY_DESCR_MAP = MappingProxyType(
+    {
+        None: MappingProxyType(
+            {
+                "0.2": GenericDescr_v0_2,
+                "0.3": GenericDescr_v0_3,
+                None: GenericDescr_v0_2,
+            }
+        ),
+        "generic": MappingProxyType(
+            {
+                "0.2": GenericDescr_v0_2,
+                "0.3": GenericDescr_v0_3,
+                None: GenericDescr_v0_2,
+            }
+        ),
+        "application": MappingProxyType(
+            {
+                "0.2": ApplicationDescr_v0_2,
+                "0.3": ApplicationDescr_v0_3,
+                None: ApplicationDescr_v0_2,
+            }
+        ),
+        "dataset": MappingProxyType(
+            {
+                "0.2": DatasetDescr_v0_2,
+                "0.3": DatasetDescr_v0_3,
+                None: DatasetDescr_v0_2,
+            }
+        ),
+        "notebook": MappingProxyType(
+            {
+                "0.2": NotebookDescr_v0_2,
+                "0.3": NotebookDescr_v0_3,
+                None: NotebookDescr_v0_2,
+            }
+        ),
+        "model": MappingProxyType(
+            {
+                "0.3": ModelDescr_v0_4,
+                "0.4": ModelDescr_v0_4,
+                "0.5": ModelDescr_v0_5,
+                None: ModelDescr_v0_4,
+            }
+        ),
+    }
+)
+
+
+class CollectionEntry(Node, extra="allow"):
+    """A valid resource description (RD).
+    The entry RD is based on the collection description itself.
+    Fields are added/overwritten by the content of `rdf_source` if `rdf_source` is specified,
+    and finally added/overwritten by any fields specified directly in the entry.
+    Except for the `id` field, fields are overwritten entirely, their content is not merged!
+    The final `id` for each collection entry is composed of the collection's `id`
+    and the entry's 'sub-'`id`, specified remotely as part of `rdf_source` or superseded in-place,
+    such that the `final_entry_id = <collection_id>/<entry_sub_id>`"""
+
+    rdf_source: Optional[FileSource] = None
+    """resource description file (RDF) source to load entry from"""
+
+    id: Optional[Union[ResourceId, DatasetId, ApplicationId, ModelId, NotebookId]] = (
+        None
+    )
+    """Collection entry sub id overwriting `rdf_source.id`.
+    The full collection entry's id is the collection's base id, followed by this sub id and separated by a slash '/'."""
+
+    _descr: Optional[EntryDescr] = PrivateAttr(None)
+
+    @property
+    def rdf_update(self) -> Dict[str, YamlValue]:
+        return self.model_extra or {}
+
+    @property
+    def descr(self) -> Optional[EntryDescr]:
+        if self._descr is None:
+            issue_warning(
+                "Collection entry description not set. Is this entry part of a"
+                + " Collection? A collection entry only has its `descr` set if it is part"
+                + " of a valid collection description.",
+                value=None,
+                severity=ALERT,
+            )
+
+        return self._descr
+
+
+class CollectionDescr(
+    GenericDescrBase, extra="allow", title="bioimage.io collection specification"
+):
+    """A bioimage.io collection describes several other bioimage.io resources.
+    Note that collections cannot be nested; resources listed under `collection` may not be collections themselves.
+    """
+
+    type: Literal["collection"] = "collection"
+
+    id: Optional[CollectionId] = None
+    """Model zoo (bioimage.io) wide, unique identifier (assigned by bioimage.io)"""
+
+    collection: NotEmpty[List[CollectionEntry]]
+    """Collection entries"""
+
+    @model_validator(mode="after")
+    def finalize_entries(self) -> Self:
+        context = validation_context_var.get()
+        common_entry_content = {
+            k: v
+            for k, v in self.model_dump(mode="json", exclude_unset=True).items()
+            if k not in ("id", "collection")
+        }
+        common_badges = common_entry_content.pop(
+            "badges", None
+        )  # `badges` not valid for model entries
+        base_id: Optional[CollectionId] = self.id
+
+        seen_entry_ids: Dict[str, int] = {}
+
+        for i, entry in enumerate(self.collection):
+            entry_data: Dict[str, Any] = dict(common_entry_content)
+            # set entry specific root as it might be adapted in the presence of an external entry source
+            entry_root = context.root
+            entry_file_name = context.file_name
+
+            if entry.rdf_source is not None:
+                if not context.perform_io_checks:
+                    issue_warning(
+                        "Skipping IO-dependent validation for collection[{i}]",
+                        value=entry.rdf_source,
+                        msg_context=dict(i=i),
+                    )
+                    continue
+
+                external_data = open_bioimageio_yaml(entry.rdf_source)
+                # add/overwrite common collection entry content with external source
+                entry_data.update(external_data.content)
+                entry_root = external_data.original_root
+                entry_file_name = external_data.original_file_name
+
+            # add/overwrite common+external entry content with in-place entry update
+            entry_data.update(entry.rdf_update)
+
+            # also update explicitly specified `id` field data
+            if entry.id is not None:
+                entry_data["id"] = entry.id
+
+            if "id" in entry_data:
+                entry_id = str(entry_data["id"])
+                if (seen_i := seen_entry_ids.get(entry_id)) is not None:
+                    raise ValueError(
+                        f"Duplicate `id` '{entry_data['id']}' in"
+                        + f" collection[{seen_i}]/collection[{i}]"
+                    )
+
+                seen_entry_ids[entry_id] = i
+            else:
+                raise ValueError(f"Missing `id` for entry {i}")
+
+            if base_id is not None:
+                entry_data["id"] = f"{base_id}/{entry_data['id']}"
+
+            type_ = entry_data.get("type")
+            if type_ == "collection":
+                raise ValueError(
+                    f"collection[{i}] has invalid entry type; collections may not be"
+                    + " nested!"
+ ) + + if ( + type_ != "model" + and common_badges is not None + and "badges" not in entry_data + ): + # set badges from the collection root for non-model resources if not set for this specific entry + entry_data["badges"] = common_badges + + entry_descr = build_description_impl( + entry_data, + context=context.replace(root=entry_root, file_name=entry_file_name), + get_rd_class=partial( + get_rd_class_impl, descriptions_map=_ENTRY_DESCR_MAP + ), + ) + assert entry_descr.validation_summary is not None + if isinstance(entry_descr, InvalidDescr): + raise ValueError( + "Invalid collection entry" + + f" collection[{i}]:\n" + + f"{entry_descr.validation_summary.format(hide_source=True, hide_env=True, root_loc=('collection', i))}" + ) + elif isinstance( + entry_descr, get_args(EntryDescr) + ): # TODO: use EntryDescr as union (py>=3.10) + entry._descr = entry_descr # type: ignore + else: + raise ValueError( + f"{entry_descr.type} {entry_descr.format_version} entries" + + f" are not allowed in {self.type} {self.format_version}." + ) + + return self + + @model_validator(mode="before") + @classmethod + def move_groups_to_collection_field( + cls, data: BioimageioYamlContent + ) -> BioimageioYamlContent: + if data.get("format_version") not in ("0.2.0", "0.2.1"): + return data + + if "collection" in data and data["collection"] is not None: + if not isinstance(data["collection"], collections.abc.Sequence): + raise ValueError( + "Expected `collection` to not be present, or to be a list" + ) + + data["collection"] = list(data["collection"]) + else: + data["collection"] = [] + + for group in ["application", "model", "dataset", "notebook"]: + if group in data: + data["collection"] += data[group] # type: ignore + data["collection"][-1]["type"] = group + + config = data.get("config") + if config and isinstance(config, dict): + id_ = config.pop("id", data.get("id")) + if id_ is not None: + data["id"] = id_ + + return data + + +class LinkedCollection(Node): + """Reference to a bioimage.io collection.""" + + id: CollectionId + """A valid collection `id` from the bioimage.io collection.""" + + version_number: Optional[int] = None + """version number (n-th published version, not the semantic version) of linked collection""" diff --git a/bioimageio/spec/collection/v0_2/__init__.py b/bioimageio/spec/collection/v0_2/__init__.py deleted file mode 100644 index 431dc6a3e..000000000 --- a/bioimageio/spec/collection/v0_2/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from . 
import converters, raw_nodes, schema, utils -from .raw_nodes import FormatVersion - -try: - from typing import get_args -except ImportError: - from typing_extensions import get_args # type: ignore - -format_version = get_args(FormatVersion)[-1] diff --git a/bioimageio/spec/collection/v0_2/converters.py b/bioimageio/spec/collection/v0_2/converters.py deleted file mode 100644 index d94d5305b..000000000 --- a/bioimageio/spec/collection/v0_2/converters.py +++ /dev/null @@ -1,25 +0,0 @@ -import copy -from typing import Any, Dict - -from bioimageio.spec.rdf.v0_2.converters import maybe_convert as maybe_convert_rdf - - -def maybe_convert(data: Dict[str, Any]) -> Dict[str, Any]: - data = copy.deepcopy(data) - if data.get("format_version") in ("0.2.0", "0.2.1"): - # move all type groups to the 'collection' field - if "collection" not in data: - data["collection"] = [] - - for group in ["application", "model", "dataset", "notebook"]: - if group in data: - data["collection"] += data[group] - data["collection"][-1]["type"] = group - - config = data.get("config") - if config and isinstance(config, dict): - id_ = config.pop("id", data.get("id")) - if id_ is not None: - data["id"] = id_ - - return maybe_convert_rdf(data) diff --git a/bioimageio/spec/collection/v0_2/raw_nodes.py b/bioimageio/spec/collection/v0_2/raw_nodes.py deleted file mode 100644 index 5a0973193..000000000 --- a/bioimageio/spec/collection/v0_2/raw_nodes.py +++ /dev/null @@ -1,106 +0,0 @@ -""" raw nodes for the collection RDF spec - -raw nodes are the deserialized equivalent to the content of any RDF. -serialization and deserialization are defined in schema: -RDF <--schema--> raw nodes -""" -import pathlib -from dataclasses import dataclass -from pathlib import Path -from typing import Any, Dict, List, Union - -import packaging.version -from marshmallow import missing -from marshmallow.utils import _Missing - -from bioimageio.spec.rdf.v0_2.raw_nodes import Author, Badge, CiteEntry, Maintainer, RDF_Base -from bioimageio.spec.shared.raw_nodes import RawNode, URI - -try: - from typing import Literal, get_args -except ImportError: - from typing_extensions import Literal, get_args # type: ignore - -FormatVersion = Literal[ - "0.2.0", "0.2.1", "0.2.2", "0.2.3" -] # newest format needs to be last (used to determine latest format version) - - -@dataclass -class CollectionEntry(RawNode): - rdf_source: Union[_Missing, URI] = missing - rdf_update: Dict[str, Any] = missing - - def __init__( - self, rdf_source: Union[_Missing, URI] = missing, rdf_update: Dict[str, Any] = missing, **implicit_rdf_update - ): - self.rdf_source = rdf_source - self.rdf_update = rdf_update or {} - self.rdf_update.update(implicit_rdf_update) - super().__init__() - - -@dataclass -class Collection(RDF_Base): - collection: List[CollectionEntry] = missing - unknown: Dict[str, Any] = missing - # manual __init__ to allow for unknown kwargs - def __init__( - self, - *, - # ResourceDescription - format_version: FormatVersion, - name: str, - type: str = missing, - version: Union[_Missing, packaging.version.Version] = missing, - root_path: pathlib.Path = pathlib.Path(), - # RDF - attachments: Union[_Missing, Dict[str, Any]] = missing, - authors: Union[_Missing, List[Author]] = missing, - badges: Union[_Missing, List[Badge]] = missing, - cite: Union[_Missing, List[CiteEntry]] = missing, - config: Union[_Missing, dict] = missing, - covers: Union[_Missing, List[Union[URI, Path]]] = missing, - description: str, - documentation: Union[_Missing, Path, URI] = missing, - git_repo: 
Union[_Missing, str] = missing, - id: Union[_Missing, str] = missing, - icon: Union[_Missing, str] = missing, - license: Union[_Missing, str] = missing, - links: Union[_Missing, List[str]] = missing, - maintainers: Union[_Missing, List[Maintainer]] = missing, - rdf_source: Union[_Missing, URI] = missing, - source: Union[_Missing, URI, Path] = missing, - tags: Union[_Missing, List[str]] = missing, - # collection RDF - collection: List[CollectionEntry], - unknown: Dict[str, Any] = missing, - **implicitly_unknown, - ): - self.collection = collection - self.unknown = unknown or {} - self.unknown.update(implicitly_unknown) - super().__init__( - attachments=attachments, - authors=authors, - badges=badges, - cite=cite, - config=config, - covers=covers, - description=description, - documentation=documentation, - format_version=format_version, - git_repo=git_repo, - icon=icon, - id=id, - license=license, - links=links, - maintainers=maintainers, - name=name, - rdf_source=rdf_source, - root_path=root_path, - source=source, - tags=tags, - type=type, - version=version, - ) diff --git a/bioimageio/spec/collection/v0_2/schema.py b/bioimageio/spec/collection/v0_2/schema.py deleted file mode 100644 index 928fdbb38..000000000 --- a/bioimageio/spec/collection/v0_2/schema.py +++ /dev/null @@ -1,76 +0,0 @@ -from types import ModuleType -from typing import ClassVar, List, Union - -from marshmallow import INCLUDE, missing, validates - -from bioimageio.spec.rdf.v0_2.schema import RDF -from bioimageio.spec.shared import fields -from bioimageio.spec.shared.schema import SharedBioImageIOSchema, WithUnknown -from . import raw_nodes - -try: - from typing import get_args -except ImportError: - from typing_extensions import get_args # type: ignore - - -class _BioImageIOSchema(SharedBioImageIOSchema): - raw_nodes: ClassVar[ModuleType] = raw_nodes - - -class CollectionEntry(_BioImageIOSchema, WithUnknown): - field_name_unknown_dict = "rdf_update" - rdf_source = fields.Union([fields.URL(), fields.DOI()]) - - -class Collection(_BioImageIOSchema, WithUnknown, RDF): - class Meta: - unknown = INCLUDE - - bioimageio_description = f"""# BioImage.IO Collection Resource Description File Specification {get_args(raw_nodes.FormatVersion)[-1]} -This specification defines the fields used in a BioImage.IO-compliant resource description file (`RDF`) for describing collections of other resources. -These fields are typically stored in a YAML file which we call Collection Resource Description File or `collection RDF`. - -The collection RDF YAML file contains mandatory and optional fields. In the following description, optional fields are indicated by _optional_. -_optional*_ with an asterisk indicates the field is optional depending on the value in another field. -""" - collection = fields.List( - fields.Nested(CollectionEntry()), - bioimageio_description="Collection entries. Each entry needs to specify a valid RDF with an id. " - "Each collection entry RDF is based on the collection RDF itself, " - "updated by rdf_source content if rdf_source is specified, " - "and updated by any fields specified directly in the entry. " - "In this context 'update' refers to overwriting RDF root fields by name." 
- "Except for the `id` field, which appends to the collection RDF `id` " - "such that full_collection_entry_id=/", - required=True, - ) - - @validates("collection") - def unique_ids(self, value: List[Union[dict, raw_nodes.CollectionEntry]]): - ids = [ - (v.get("id", missing), v.get("rdf_source", missing)) - if isinstance(v, dict) - else (v.rdf_update.get("id", missing), v.rdf_source) - for v in value - ] - # skip check for id only specified in remote source - ids = [vid for vid, vs in ids if not (vid is missing and vs is not missing)] - - if missing in ids: - raise ValueError(f"Missing ids in collection entries") - - non_string_ids = [v for v in ids if not isinstance(v, str)] - if non_string_ids: - raise ValueError(f"Non-string ids in collection: {non_string_ids}") - - seen = set() - duplicates = [] - for v in ids: - if v in seen: - duplicates.append(v) - else: - seen.add(v) - - if duplicates: - raise ValueError(f"Duplicate ids in collection: {duplicates}") diff --git a/bioimageio/spec/collection/v0_2/utils.py b/bioimageio/spec/collection/v0_2/utils.py deleted file mode 100644 index f3ee13d30..000000000 --- a/bioimageio/spec/collection/v0_2/utils.py +++ /dev/null @@ -1,118 +0,0 @@ -import os -import pathlib -import warnings -from typing import Callable, List, Optional, Tuple, Union - -from marshmallow import missing -from marshmallow.utils import _Missing - -from . import raw_nodes, schema -from bioimageio.spec.shared.raw_nodes import ResourceDescription as RawResourceDescription - - -def filter_resource_description(raw_rd: raw_nodes.RDF_Base) -> raw_nodes.RDF_Base: - return raw_rd - - -def default_enrich_partial_rdf(partial_rdf: dict, root: Union[raw_nodes.URI, pathlib.Path]) -> dict: - return partial_rdf - - -def resolve_collection_entries( - collection: raw_nodes.Collection, - collection_id: Optional[str] = None, - update_to_format: Optional[str] = None, - enrich_partial_rdf: Callable[[dict, Union[raw_nodes.URI, pathlib.Path]], dict] = default_enrich_partial_rdf, -) -> List[Tuple[Optional[RawResourceDescription], Optional[str]]]: - """ - - Args: - collection: collection node to resolve entries of - collection_id: (optional)ly overwrite collection.id - update_to_format: (optional) format version the resolved entries should be updated to - enrich_partial_rdf: (optional) callable to enrich the partial base rdf (inherited from collection) and the - partial entry rdf (only the fields specified in an entry of the collection.collection list of entries) - - Returns: - A list of resolved entries consisting each of a resolved 'raw node' and error=None or 'raw node'=None - and an error message. - """ - from bioimageio.spec import serialize_raw_resource_description_to_dict, load_raw_resource_description - - if collection.id is missing: - warnings.warn("Collection has no id; links may not be resolved.") - - ret = [] - seen_ids = set() - - # rdf entries are based on collection RDF... - rdf_data_base = serialize_raw_resource_description_to_dict(collection) - assert missing not in rdf_data_base.values() - rdf_data_base.pop("collection") # ... 
without the collection field to avoid recursion - - rdf_data_base = enrich_partial_rdf(rdf_data_base, collection.root_path) # enrich the rdf base - - root_id = rdf_data_base.pop("id", None) if collection_id is None else collection_id - for idx, entry in enumerate(collection.collection): # type: ignore - rdf_data = dict(rdf_data_base) - - entry_error: Optional[str] = None - id_info = f"(id={entry.rdf_update['id']}) " if "id" in entry.rdf_update else "" - - # update rdf entry with entry's rdf_source - sub_id: Union[str, _Missing] = missing - if entry.rdf_source is not missing: - rdf_source = entry.rdf_source - if isinstance(rdf_source, str) and not rdf_source.startswith("http") or isinstance(rdf_source, os.PathLike): - # a relative rdf_source path is relative to collection.root_path - rdf_source = collection.root_path / pathlib.Path(rdf_source) - - try: - source_entry_rd = load_raw_resource_description(rdf_source) - except Exception as e: - entry_error = f"collection[{idx}]: {id_info}Invalid rdf_source: {e}" - else: - source_entry_data = serialize_raw_resource_description_to_dict(source_entry_rd) - sub_id = source_entry_data.pop("id", missing) - assert missing not in source_entry_data.values() - source_entry_data = enrich_partial_rdf(source_entry_data, collection.root_path) # enrich entry data - rdf_data.update(source_entry_data) - - # update rdf entry with fields specified directly in the entry - rdf_update = entry.rdf_update - rdf_update = enrich_partial_rdf(rdf_update, collection.root_path) # enrich rdf update from entry - assert missing not in rdf_update.values() - sub_id = rdf_update.pop("id", sub_id) - if sub_id is missing: - entry_error = f"collection[{idx}]: Missing `id` field" - elif sub_id in seen_ids: - entry_error = f"collection[{idx}]: Duplicate `id` value {sub_id}" - else: - seen_ids.add(sub_id) - - rdf = None - if entry_error is None: - rdf_data.update(rdf_update) - if root_id is None: - rdf_data["id"] = sub_id - else: - rdf_data["id"] = f"{root_id}/{sub_id}" - - # Convert simple links to links with collection id prepended - if "links" in rdf_data: - for i in range(len(rdf_data["links"])): - link = rdf_data["links"][i] - if "/" not in link and collection.id is not missing: - rdf_data["links"][i] = collection.id + "/" + link - - rdf_data.pop("rdf_source", None) # remove absorbed rdf_source - rdf_data["root_path"] = collection.root_path # collection entry always has the same root as the collection - assert missing not in rdf_data.values() - try: - rdf = load_raw_resource_description(rdf_data, update_to_format=update_to_format) - except Exception as e: - entry_error = str(e) - - ret.append((rdf, entry_error)) - - return ret diff --git a/bioimageio/spec/collection/v0_3.py b/bioimageio/spec/collection/v0_3.py new file mode 100644 index 000000000..d21fbdc58 --- /dev/null +++ b/bioimageio/spec/collection/v0_3.py @@ -0,0 +1,341 @@ +from functools import partial +from types import MappingProxyType +from typing import ( + TYPE_CHECKING, + Any, + Dict, + List, + Literal, + Optional, + Union, + cast, + get_args, +) + +from pydantic import PrivateAttr, model_validator +from typing_extensions import Self + +from .._build_description import build_description_impl, get_rd_class_impl +from .._internal.common_nodes import InvalidDescr, Node +from .._internal.field_warning import issue_warning +from .._internal.io import FileDescr as FileDescr +from .._internal.io import Sha256 as Sha256 +from .._internal.io import YamlValue +from .._internal.io_basics import AbsoluteFilePath as AbsoluteFilePath 
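Editorial aside: the `_ENTRY_DESCR_MAP` tables in both collection modules implement a two-level dispatch, resource `type` first, then the `"major.minor"` prefix of `format_version`, with the `None` keys acting as fallbacks. A minimal, runnable sketch of that lookup logic follows; the map contents, the `demo_lookup` name, and the version-truncation step are illustrative assumptions, not the library's API.

    from types import MappingProxyType
    from typing import Mapping, Optional

    # toy stand-in for _ENTRY_DESCR_MAP: resource type -> format version -> class (name)
    _DEMO_MAP: Mapping[Optional[str], Mapping[Optional[str], str]] = MappingProxyType(
        {
            "model": MappingProxyType(
                {"0.4": "ModelDescr_v0_4", "0.5": "ModelDescr_v0_5", None: "ModelDescr_v0_5"}
            ),
            None: MappingProxyType(
                {"0.2": "GenericDescr_v0_2", None: "GenericDescr_v0_3"}
            ),
        }
    )

    def demo_lookup(type_: Optional[str], format_version: Optional[str]) -> str:
        by_version = _DEMO_MAP.get(type_, _DEMO_MAP[None])  # unknown type -> generic fallback
        key: Optional[str] = None
        if format_version is not None:
            key = ".".join(format_version.split(".")[:2])  # e.g. "0.4.10" -> "0.4"
        return by_version.get(key, by_version[None])  # unknown version -> default class

    assert demo_lookup("model", "0.4.10") == "ModelDescr_v0_4"
    assert demo_lookup("model", None) == "ModelDescr_v0_5"
    assert demo_lookup("mystery", "1.0.0") == "GenericDescr_v0_3"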
+from .._internal.io_utils import open_bioimageio_yaml
+from .._internal.types import ApplicationId as ApplicationId
+from .._internal.types import CollectionId as CollectionId
+from .._internal.types import DatasetId as DatasetId
+from .._internal.types import FileSource, NotEmpty
+from .._internal.types import ModelId as ModelId
+from .._internal.types import NotebookId as NotebookId
+from .._internal.url import HttpUrl as HttpUrl
+from .._internal.validation_context import (
+    validation_context_var,
+)
+from .._internal.warning_levels import ALERT
+from ..application import ApplicationDescr_v0_2, ApplicationDescr_v0_3
+from ..dataset import DatasetDescr_v0_2, DatasetDescr_v0_3
+from ..generic import GenericDescr_v0_2, GenericDescr_v0_3
+from ..generic.v0_3 import Author as Author
+from ..generic.v0_3 import BadgeDescr as BadgeDescr
+from ..generic.v0_3 import CiteEntry as CiteEntry
+from ..generic.v0_3 import Doi as Doi
+from ..generic.v0_3 import (
+    GenericDescrBase,
+    _author_conv,  # pyright: ignore[reportPrivateUsage]
+    _maintainer_conv,  # pyright: ignore[reportPrivateUsage]
+)
+from ..generic.v0_3 import LinkedResource as LinkedResource
+from ..generic.v0_3 import Maintainer as Maintainer
+from ..generic.v0_3 import OrcidId as OrcidId
+from ..generic.v0_3 import RelativeFilePath as RelativeFilePath
+from ..generic.v0_3 import ResourceId as ResourceId
+from ..generic.v0_3 import Uploader as Uploader
+from ..generic.v0_3 import Version as Version
+from ..model import ModelDescr_v0_4, ModelDescr_v0_5
+from ..notebook import NotebookDescr_v0_2, NotebookDescr_v0_3
+from .v0_2 import CollectionDescr as _CollectionDescr_v0_2
+
+EntryDescr = Union[
+    ApplicationDescr_v0_2,
+    ApplicationDescr_v0_3,
+    DatasetDescr_v0_2,
+    DatasetDescr_v0_3,
+    ModelDescr_v0_4,
+    ModelDescr_v0_5,
+    NotebookDescr_v0_2,
+    NotebookDescr_v0_3,
+    GenericDescr_v0_2,
+    GenericDescr_v0_3,
+]
+
+_ENTRY_DESCR_MAP = MappingProxyType(
+    {
+        None: MappingProxyType(
+            {
+                "0.2": GenericDescr_v0_2,
+                "0.3": GenericDescr_v0_3,
+                None: GenericDescr_v0_3,
+            }
+        ),
+        "generic": MappingProxyType(
+            {
+                "0.2": GenericDescr_v0_2,
+                "0.3": GenericDescr_v0_3,
+                None: GenericDescr_v0_3,
+            }
+        ),
+        "application": MappingProxyType(
+            {
+                "0.2": ApplicationDescr_v0_2,
+                "0.3": ApplicationDescr_v0_3,
+                None: ApplicationDescr_v0_3,
+            }
+        ),
+        "dataset": MappingProxyType(
+            {
+                "0.2": DatasetDescr_v0_2,
+                "0.3": DatasetDescr_v0_3,
+                None: DatasetDescr_v0_3,
+            }
+        ),
+        "notebook": MappingProxyType(
+            {
+                "0.2": NotebookDescr_v0_2,
+                "0.3": NotebookDescr_v0_3,
+                None: NotebookDescr_v0_3,
+            }
+        ),
+        "model": MappingProxyType(
+            {
+                "0.3": ModelDescr_v0_4,
+                "0.4": ModelDescr_v0_4,
+                "0.5": ModelDescr_v0_5,
+                None: ModelDescr_v0_5,
+            }
+        ),
+    }
+)
+
+
+class CollectionEntry(Node, extra="allow"):
+    """A collection entry description is based on the collection description itself.
+    Fields are added/overwritten by the content of `entry_source` if `entry_source` is set,
+    and finally added/overwritten by any fields specified directly in the entry.
+    Except for the `id` field, fields are overwritten entirely; their content is not merged!
+    The final `id` for each collection entry is composed of the collection's `id`
+    and the entry's 'sub-'`id`, specified externally in `entry_source` or superseded in-place,
+    such that the `final_entry_id = <collection_id>/<entry_sub_id>`"""
+
+    entry_source: Optional[FileSource] = None
+    """an external source this entry description is based on"""
+
+    id: Optional[Union[ResourceId, DatasetId, ApplicationId, ModelId, NotebookId]] = (
+        None
+    )
+    """Collection entry sub id overwriting `entry_source.id`.
+    The full collection entry's id is the collection's base id, followed by this sub id and separated by a slash '/'."""
+
+    _descr: Optional[EntryDescr] = PrivateAttr(None)
+
+    @property
+    def entry_update(self) -> Dict[str, YamlValue]:
+        return self.model_extra or {}
+
+    @property
+    def descr(self) -> Optional[EntryDescr]:
+        if self._descr is None:
+            issue_warning(
+                "Collection entry description not set. Is this entry part of a"
+                + " Collection? A collection entry only has its `descr` set if it is part"
+                + " of a valid collection description.",
+                value=None,
+                severity=ALERT,
+            )
+
+        return self._descr
+
+
+class CollectionDescr(
+    GenericDescrBase, extra="allow", title="bioimage.io collection specification"
+):
+    """A bioimage.io collection resource description file (collection RDF) describes a collection of bioimage.io
+    resources.
+    The resources listed in a collection RDF have types other than 'collection'; collections cannot be nested.
+    """
+
+    type: Literal["collection"] = "collection"
+
+    id: Optional[CollectionId] = None
+    """Model zoo (bioimage.io) wide, unique identifier (assigned by bioimage.io)"""
+
+    parent: Optional[CollectionId] = None
+    """The description from which this one is derived"""
+
+    collection: NotEmpty[List[CollectionEntry]]
+    """Collection entries"""
+
+    @model_validator(mode="after")
+    def finalize_entries(self) -> Self:
+        context = validation_context_var.get()
+        common_entry_content = {
+            k: v
+            for k, v in self.model_dump(mode="json", exclude_unset=True).items()
+            if k not in ("id", "collection")
+        }
+        common_badges = common_entry_content.pop(
+            "badges", None
+        )  # `badges` not valid for model entries
+        base_id: Optional[CollectionId] = self.id
+
+        seen_entry_ids: Dict[str, int] = {}
+
+        for i, entry in enumerate(self.collection):
+            entry_data: Dict[str, Any] = dict(common_entry_content)
+            # set entry specific root as it might be adapted in the presence of an external entry source
+            entry_root = context.root
+            entry_file_name = context.file_name
+
+            if entry.entry_source is not None:
+                if not context.perform_io_checks:
+                    issue_warning(
+                        "Skipping IO-dependent validation for collection[{i}]",
+                        value=entry.entry_source,
+                        msg_context=dict(i=i),
+                    )
+                    continue
+
+                external_data = open_bioimageio_yaml(entry.entry_source)
+                # add/overwrite common collection entry content with external source
+                entry_data.update(external_data.content)
+                entry_root = external_data.original_root
+                entry_file_name = external_data.original_file_name
+
+            # add/overwrite common+external entry content with in-place entry update
+            entry_data.update(entry.entry_update)
+
+            # also update explicitly specified `id` field data
+            if entry.id is not None:
+                entry_data["id"] = entry.id
+
+            if "id" in entry_data:
+                entry_id = str(entry_data["id"])
+                if (seen_i := seen_entry_ids.get(entry_id)) is not None:
+                    raise ValueError(
+                        f"Duplicate `id` '{entry_data['id']}' in"
+                        + f" collection[{seen_i}]/collection[{i}]"
+                    )
+
+                seen_entry_ids[entry_id] = i
+            else:
+                raise ValueError(f"Missing `id` for entry {i}")
+
+            if base_id is
not None: + entry_data["id"] = f"{base_id}/{entry_data['id']}" + + type_ = entry_data.get("type") + if type_ == "collection": + raise ValueError( + f"collection[{i}].type may not be 'collection'; collections may not" + + " be nested!" + ) + + if ( + type_ != "model" + and common_badges is not None + and "badges" not in entry_data + ): + # set badges from the collection root for non-model resources if not set for this specific entry + entry_data["badges"] = common_badges + + entry_descr = build_description_impl( + entry_data, + context=context.replace(root=entry_root, file_name=entry_file_name), + get_rd_class=partial( + get_rd_class_impl, descriptions_map=_ENTRY_DESCR_MAP + ), + ) + + assert entry_descr.validation_summary is not None + if isinstance(entry_descr, InvalidDescr): + raise ValueError( + "Invalid collection entry" + + f" collection[{i}]:\n" + + f"{entry_descr.validation_summary.format(hide_source=True, hide_env=True, root_loc=('collection', i))}" + ) + elif isinstance( + entry_descr, get_args(EntryDescr) + ): # TODO: use EntryDescr as union (py>=3.10) + entry._descr = entry_descr # type: ignore + else: + raise ValueError( + f"{entry_descr.type} {entry_descr.format_version} entries " + + f"are not allowed in {self.type} {self.format_version}." + ) + return self + + @model_validator(mode="before") + @classmethod + def _convert(cls, data: Dict[str, Any], /) -> Dict[str, Any]: + if ( + data.get("type") == "collection" + and isinstance(fv := data.get("format_version"), str) + and fv.startswith("0.2.") + ): + old = _CollectionDescr_v0_2.load(data) + if isinstance(old, InvalidDescr): + return data + + return cast( + Dict[str, Any], + (cls if TYPE_CHECKING else dict)( + attachments=( + [] + if old.attachments is None + else [FileDescr(source=f) for f in old.attachments.files] + ), + authors=[_author_conv.convert_as_dict(a) for a in old.authors], + badges=old.badges, + cite=[ + {"text": c.text, "doi": c.doi, "url": c.url} for c in old.cite + ], + collection=[ + (CollectionEntry if TYPE_CHECKING else dict)( + entry_source=entry.rdf_source, id=entry.id, **entry.rdf_update # type: ignore + ) + for entry in old.collection + ], + config=old.config, + covers=old.covers, + description=old.description, + documentation=old.documentation, + format_version="0.3.0", + git_repo=old.git_repo, + icon=old.icon, + id=old.id, + license=old.license, + links=old.links, + maintainers=[ + _maintainer_conv.convert_as_dict(m) for m in old.maintainers + ], + name=old.name, + tags=old.tags, + type=old.type, + uploader=old.uploader, + version=old.version, + **(old.model_extra or {}), + ), + ) + + return data + + +class LinkedCollection(Node): + """Reference to a bioimage.io collection.""" + + id: CollectionId + """A valid collection `id` from the bioimage.io collection.""" + + version_number: int + """version number (n-th published version, not the semantic version) of linked collection""" diff --git a/bioimageio/spec/commands.py b/bioimageio/spec/commands.py deleted file mode 100644 index 422f9cce8..000000000 --- a/bioimageio/spec/commands.py +++ /dev/null @@ -1,222 +0,0 @@ -import os -import traceback -import warnings -from pathlib import Path -from typing import Any, Callable, Dict, IO, List, Optional, Union - -from marshmallow import ValidationError - -from .collection.v0_2.utils import default_enrich_partial_rdf, resolve_collection_entries -from .io_ import ( - load_raw_resource_description, - resolve_rdf_source, - save_raw_resource_description, - serialize_raw_resource_description_to_dict, -) -from .shared 
import update_nested -from .shared.common import ValidationSummary, ValidationWarning, nested_default_dict_as_nested_dict, yaml -from .shared.raw_nodes import ResourceDescription as RawResourceDescription, URI -from .v import __version__ - - -def update_format( - rdf_source: Union[dict, os.PathLike, IO, str, bytes], - path: Union[os.PathLike, str], - update_to_format: str = "latest", -): - """Auto-update fields of a BioImage.IO resource""" - raw = load_raw_resource_description(rdf_source, update_to_format=update_to_format) - save_raw_resource_description(raw, Path(path)) - - -def validate( - rdf_source: Union[RawResourceDescription, dict, os.PathLike, IO, str, bytes], - update_format: bool = False, - update_format_inner: Optional[bool] = None, - verbose: bool = "deprecated", # type: ignore - enrich_partial_rdf: Callable[[dict, Union[URI, Path]], dict] = default_enrich_partial_rdf, -) -> ValidationSummary: - """Validate a BioImage.IO Resource Description File (RDF). - - Args: - rdf_source: resource description as path, url or bytes of an RDF or packaged resource, or as yaml string or dict - update_format: weather or not to apply auto-conversion to the latest format version before validation - update_format_inner: (applicable to `collections` resources only) `update_format` for nested resources - verbose: deprecated - enrich_partial_rdf: (optional) callable to customize RDF data on the fly. - Don't use this if you don't know exactly what to do with it. - - Returns: - A summary dict with keys: - bioimageio_spec_version, - error, - name, - nested_errors, - source_name, - status, - traceback, - warnings, - """ - if verbose != "deprecated": - warnings.warn("'verbose' flag is deprecated") - - if update_format_inner is None: - update_format_inner = update_format - - error: Union[None, str, Dict[str, Any]] = None - tb = None - nested_errors: Dict[str, dict] = {} - with warnings.catch_warnings(record=True) as warnings1: - if isinstance(rdf_source, RawResourceDescription): - source_name = rdf_source.name - else: - try: - rdf_source_preview, source_name, root = resolve_rdf_source(rdf_source) - except Exception as e: - error = str(e) - tb = traceback.format_tb(e.__traceback__) - try: - source_name = str(rdf_source) - except Exception as e: - source_name = str(e) - else: - if not isinstance(rdf_source_preview, dict): - error = f"expected loaded resource to be a dictionary, but got type {type(rdf_source_preview)}" - - all_warnings = warnings1 or [] - raw_rd = None - format_version = "" - resource_type = "" - if not error: - with warnings.catch_warnings(record=True) as warnings2: - try: - raw_rd = load_raw_resource_description(rdf_source, update_to_format="latest" if update_format else None) - except ValidationError as e: - error = nested_default_dict_as_nested_dict(e.normalized_messages()) - except Exception as e: - error = str(e) - tb = traceback.format_tb(e.__traceback__) - - if raw_rd is not None: - format_version = raw_rd.format_version - resource_type = "general" if raw_rd.type == "rdf" else raw_rd.type - - if raw_rd is not None and raw_rd.type == "collection": - assert hasattr(raw_rd, "collection") - for idx, (entry_rdf, entry_error) in enumerate(resolve_collection_entries(raw_rd, enrich_partial_rdf=enrich_partial_rdf)): # type: ignore - if entry_error: - entry_summary: Union[Dict[str, str], ValidationSummary] = {"error": entry_error} - else: - assert isinstance(entry_rdf, RawResourceDescription) - entry_summary = validate( - entry_rdf, update_format=update_format, 
update_format_inner=update_format_inner - ) - - wrns: Union[str, dict] = entry_summary.get("warnings", {}) - assert isinstance(wrns, dict) - id_info = f"(id={entry_rdf.id}) " if hasattr(entry_rdf, "id") else "" # type: ignore - for k, v in wrns.items(): - warnings.warn(f"collection[{idx}]:{k}: {id_info}{v}", category=ValidationWarning) - - if entry_summary["error"]: - if "collection" not in nested_errors: - nested_errors["collection"] = {} - - nested_errors["collection"][idx] = entry_summary["error"] - - if nested_errors: - # todo: make short error message and refer to 'nested_errors' or deprecated 'nested_errors' - error = nested_errors - - all_warnings += warnings2 or [] - - return { - "bioimageio_spec_version": __version__, - "error": error, - "name": ( - f"bioimageio.spec static validation of {resource_type} RDF {format_version}" - f"{' with update to latest format version' if update_format else ''}" - ), - "nested_errors": nested_errors, - "source_name": source_name, - "status": "passed" if error is None else "failed", - "traceback": tb, - "warnings": ValidationWarning.get_warning_summary(all_warnings), - } - - -def update_rdf( - source: Union[RawResourceDescription, dict, os.PathLike, IO, str, bytes], - update: Union[RawResourceDescription, dict, os.PathLike, IO, str, bytes], - output: Union[None, dict, os.PathLike] = None, - validate_output: bool = True, -) -> Union[dict, Path, RawResourceDescription]: - """ - Args: - source: source of RDF - update: a (partial) RDF used as update - output: dict or path to write output to (default: return new dict) - validate_output: whether or not to validate the updated RDF - - Returns: - The updated content of the source rdf as dict or, - if output is a path, that path (where the updated content is saved to). - - Raises: - ValidationError: if `validate_output` and the updated rdf does not pass validation - """ - if isinstance(source, RawResourceDescription): - src = source - else: - src = load_raw_resource_description(source) - - up = resolve_rdf_source(update) - - if src.root_path != up.root and not validate_output: - warnings.warn( - f"root path of source {src.name} and update {up.name} differ. Relative paths might be invalid in the output." 
- ) - - out_data = update_nested(src, up.data) - assert isinstance(out_data, (RawResourceDescription, dict)) - if validate_output: - summary = validate(out_data) - if summary["warnings"]: - warnings.warn(f"updated rdf validation warnings\n: {summary['warnings']}") - if summary["status"] != "passed": - msg = f"updated rdf did not pass validation; status: {summary['status']}" - if summary["error"]: - msg += f"; error: {summary['error']}" - - raise ValidationError(msg) - - if output is None: - if isinstance(source, RawResourceDescription): - return load_raw_resource_description(out_data) - else: - output = {} - - if isinstance(output, dict): - if isinstance(out_data, RawResourceDescription): - out_data = serialize_raw_resource_description_to_dict(out_data, convert_absolute_paths=False) - - assert isinstance(out_data, dict) - output.update(out_data) - return output - else: - assert yaml is not None - output = Path(output) - if isinstance(out_data, RawResourceDescription): - out_data.root_path = output.parent - try: - out_data = serialize_raw_resource_description_to_dict(out_data, convert_absolute_paths=True) - except ValueError as e: - warnings.warn( - f"Failed to convert paths in updated rdf to relative paths with root {output}; error: {e}" - ) - warnings.warn(f"updated rdf at {output} contains absolute paths and is thus invalid!") - assert isinstance(out_data, RawResourceDescription) - out_data = serialize_raw_resource_description_to_dict(out_data, convert_absolute_paths=False) - - yaml.dump(out_data, output) - return output diff --git a/bioimageio/spec/common.py b/bioimageio/spec/common.py new file mode 100644 index 000000000..687d76a54 --- /dev/null +++ b/bioimageio/spec/common.py @@ -0,0 +1,14 @@ +from pydantic import ValidationError as ValidationError + +from ._internal.common_nodes import InvalidDescr as InvalidDescr +from ._internal.io import BioimageioYamlContent as BioimageioYamlContent +from ._internal.io import BioimageioYamlSource as BioimageioYamlSource +from ._internal.io import FileDescr as FileDescr +from ._internal.io import Sha256 as Sha256 +from ._internal.io import YamlValue as YamlValue +from ._internal.io_basics import FileName as FileName +from ._internal.root_url import RootHttpUrl as RootHttpUrl +from ._internal.types import FileSource as FileSource +from ._internal.types import PermissiveFileSource as PermissiveFileSource +from ._internal.types import RelativeFilePath as RelativeFilePath +from ._internal.url import HttpUrl as HttpUrl diff --git a/bioimageio/spec/dataset/__init__.py b/bioimageio/spec/dataset/__init__.py index d8a7cc745..76db43e0b 100644 --- a/bioimageio/spec/dataset/__init__.py +++ b/bioimageio/spec/dataset/__init__.py @@ -1,14 +1,20 @@ -from . import v0_2 - -# autogen: start -from . 
import converters, raw_nodes, schema, utils
-from .raw_nodes import FormatVersion
-
-try:
-    from typing import get_args
-except ImportError:
-    from typing_extensions import get_args  # type: ignore
-
-format_version = get_args(FormatVersion)[-1]
-
-# autogen: stop
+# autogen: start
+"""
+implementations of all released minor versions are available in submodules:
+- dataset v0_2: `bioimageio.spec.dataset.v0_2.DatasetDescr` [user documentation](../../../user_docs/dataset_descr_v0-2.md)
+- dataset v0_3: `bioimageio.spec.dataset.v0_3.DatasetDescr` [user documentation](../../../user_docs/dataset_descr_v0-3.md)
+"""
+from typing import Union
+
+from pydantic import Discriminator
+from typing_extensions import Annotated
+
+from .v0_2 import DatasetDescr as DatasetDescr_v0_2
+from .v0_3 import DatasetDescr as DatasetDescr
+from .v0_3 import DatasetDescr as DatasetDescr_v0_3
+
+AnyDatasetDescr = Annotated[
+    Union[DatasetDescr_v0_2, DatasetDescr_v0_3], Discriminator("format_version")
+]
+"""Union of any released dataset description"""
+# autogen: stop
diff --git a/bioimageio/spec/dataset/converters.py b/bioimageio/spec/dataset/converters.py
deleted file mode 100644
index b296f1351..000000000
--- a/bioimageio/spec/dataset/converters.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# Auto-generated by generate_passthrough_modules.py - do not modify
-
-from .v0_2.converters import *
diff --git a/bioimageio/spec/dataset/raw_nodes.py b/bioimageio/spec/dataset/raw_nodes.py
deleted file mode 100644
index bb25c3c1f..000000000
--- a/bioimageio/spec/dataset/raw_nodes.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# Auto-generated by generate_passthrough_modules.py - do not modify
-
-from .v0_2.raw_nodes import *
diff --git a/bioimageio/spec/dataset/schema.py b/bioimageio/spec/dataset/schema.py
deleted file mode 100644
index 9a6b0a4b9..000000000
--- a/bioimageio/spec/dataset/schema.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# Auto-generated by generate_passthrough_modules.py - do not modify
-
-from .v0_2.schema import *
diff --git a/bioimageio/spec/dataset/utils.py b/bioimageio/spec/dataset/utils.py
deleted file mode 100644
index 1086c2eb1..000000000
--- a/bioimageio/spec/dataset/utils.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# Auto-generated by generate_passthrough_modules.py - do not modify
-
-from .v0_2.utils import *
diff --git a/bioimageio/spec/dataset/v0_2.py b/bioimageio/spec/dataset/v0_2.py
new file mode 100644
index 000000000..b4dde6ed8
--- /dev/null
+++ b/bioimageio/spec/dataset/v0_2.py
@@ -0,0 +1,43 @@
+from typing import Literal, Optional
+
+from .._internal.common_nodes import Node
+from .._internal.io_basics import AbsoluteFilePath as AbsoluteFilePath
+from .._internal.types import DatasetId as DatasetId
+from .._internal.url import HttpUrl as HttpUrl
+from ..generic.v0_2 import AttachmentsDescr as AttachmentsDescr
+from ..generic.v0_2 import Author as Author
+from ..generic.v0_2 import BadgeDescr as BadgeDescr
+from ..generic.v0_2 import CiteEntry as CiteEntry
+from ..generic.v0_2 import Doi as Doi
+from ..generic.v0_2 import GenericDescrBase
+from ..generic.v0_2 import LinkedResource as LinkedResource
+from ..generic.v0_2 import Maintainer as Maintainer
+from ..generic.v0_2 import OrcidId as OrcidId
+from ..generic.v0_2 import RelativeFilePath as RelativeFilePath
+from ..generic.v0_2 import ResourceId as ResourceId
+from ..generic.v0_2 import Uploader as Uploader
+from ..generic.v0_2 import Version as Version
+
+
+class DatasetDescr(GenericDescrBase, title="bioimage.io dataset specification"):
+    """A bioimage.io dataset resource description file (dataset RDF) describes a dataset relevant to bioimage
+    processing.
+    """
+
+    type: Literal["dataset"] = "dataset"
+
+    id: Optional[DatasetId] = None
+    """Model zoo (bioimage.io) wide, unique identifier (assigned by bioimage.io)"""
+
+    source: Optional[HttpUrl] = None
+    """URL to the source of the dataset."""
+
+
+class LinkedDataset(Node):
+    """Reference to a bioimage.io dataset."""
+
+    id: DatasetId
+    """A valid dataset `id` from the bioimage.io collection."""
+
+    version_number: Optional[int] = None
+    """version number (n-th published version, not the semantic version) of linked dataset"""
diff --git a/bioimageio/spec/dataset/v0_2/__init__.py b/bioimageio/spec/dataset/v0_2/__init__.py
deleted file mode 100644
index 431dc6a3e..000000000
--- a/bioimageio/spec/dataset/v0_2/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from . import converters, raw_nodes, schema, utils
-from .raw_nodes import FormatVersion
-
-try:
-    from typing import get_args
-except ImportError:
-    from typing_extensions import get_args  # type: ignore
-
-format_version = get_args(FormatVersion)[-1]
diff --git a/bioimageio/spec/dataset/v0_2/converters.py b/bioimageio/spec/dataset/v0_2/converters.py
deleted file mode 100644
index 833a36197..000000000
--- a/bioimageio/spec/dataset/v0_2/converters.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from bioimageio.spec.rdf.v0_2.converters import maybe_convert as maybe_convert_rdf
-
-maybe_convert = maybe_convert_rdf
diff --git a/bioimageio/spec/dataset/v0_2/raw_nodes.py b/bioimageio/spec/dataset/v0_2/raw_nodes.py
deleted file mode 100644
index 8d2b06559..000000000
--- a/bioimageio/spec/dataset/v0_2/raw_nodes.py
+++ /dev/null
@@ -1,23 +0,0 @@
-""" raw nodes for the dataset RDF spec
-
-raw nodes are the deserialized equivalent to the content of any RDF.
-serialization and deserialization are defined in schema:
-RDF <--schema--> raw nodes
-"""
-from dataclasses import dataclass
-
-from marshmallow import missing
-
-from bioimageio.spec.rdf.v0_2.raw_nodes import FormatVersion, RDF_Base as _RDF
-
-try:
-    from typing import Literal
-except ImportError:
-    from typing_extensions import Literal  # type: ignore
-
-FormatVersion = FormatVersion
-
-
-@dataclass
-class Dataset(_RDF):
-    type: Literal["dataset"] = missing
diff --git a/bioimageio/spec/dataset/v0_2/schema.py b/bioimageio/spec/dataset/v0_2/schema.py
deleted file mode 100644
index 50419ca4f..000000000
--- a/bioimageio/spec/dataset/v0_2/schema.py
+++ /dev/null
@@ -1,25 +0,0 @@
-from types import ModuleType
-from typing import ClassVar
-
-from bioimageio.spec.rdf.v0_2.schema import RDF
-from bioimageio.spec.shared.schema import SharedBioImageIOSchema
-from . import raw_nodes
-
-try:
-    from typing import get_args
-except ImportError:
-    from typing_extensions import get_args  # type: ignore
-
-
-class _BioImageIOSchema(SharedBioImageIOSchema):
-    raw_nodes: ClassVar[ModuleType] = raw_nodes
-
-
-class Dataset(_BioImageIOSchema, RDF):
-    bioimageio_description = f"""# BioImage.IO Dataset Resource Description File Specification {get_args(raw_nodes.FormatVersion)[-1]}
-This specification defines the fields used in a BioImage.IO-compliant resource description file (`RDF`) for describing datasets.
-These fields are typically stored in a YAML file which we call Dataset Resource Description File or `dataset RDF`.
-
-The dataset RDF YAML file contains mandatory and optional fields. In the following description, optional fields are indicated by _optional_.
-_optional*_ with an asterisk indicates the field is optional depending on the value in another field. -""" diff --git a/bioimageio/spec/dataset/v0_2/utils.py b/bioimageio/spec/dataset/v0_2/utils.py deleted file mode 100644 index 53b0f80ff..000000000 --- a/bioimageio/spec/dataset/v0_2/utils.py +++ /dev/null @@ -1,5 +0,0 @@ -from . import raw_nodes - - -def filter_resource_description(raw_rd: raw_nodes.Dataset) -> raw_nodes.Dataset: - return raw_rd diff --git a/bioimageio/spec/dataset/v0_3.py b/bioimageio/spec/dataset/v0_3.py new file mode 100644 index 000000000..b50738622 --- /dev/null +++ b/bioimageio/spec/dataset/v0_3.py @@ -0,0 +1,107 @@ +from typing import TYPE_CHECKING, Any, Dict, Literal, Optional, cast + +from pydantic import model_validator + +from .._internal.common_nodes import InvalidDescr, Node +from .._internal.io import FileDescr as FileDescr +from .._internal.io import Sha256 as Sha256 +from .._internal.io_basics import AbsoluteFilePath as AbsoluteFilePath +from .._internal.types import DatasetId as DatasetId +from .._internal.url import HttpUrl as HttpUrl +from ..generic.v0_3 import Author as Author +from ..generic.v0_3 import BadgeDescr as BadgeDescr +from ..generic.v0_3 import CiteEntry as CiteEntry +from ..generic.v0_3 import ( + DocumentationSource, + GenericDescrBase, + _author_conv, # pyright: ignore[reportPrivateUsage] + _maintainer_conv, # pyright: ignore[reportPrivateUsage] +) +from ..generic.v0_3 import Doi as Doi +from ..generic.v0_3 import LinkedResource as LinkedResource +from ..generic.v0_3 import Maintainer as Maintainer +from ..generic.v0_3 import OrcidId as OrcidId +from ..generic.v0_3 import RelativeFilePath as RelativeFilePath +from ..generic.v0_3 import ResourceId as ResourceId +from ..generic.v0_3 import Uploader as Uploader +from ..generic.v0_3 import Version as Version +from .v0_2 import DatasetDescr as DatasetDescr02 + + +class DatasetDescr(GenericDescrBase, title="bioimage.io dataset specification"): + """A bioimage.io dataset resource description file (dataset RDF) describes a dataset relevant to bioimage + processing. 
+ """ + + type: Literal["dataset"] = "dataset" + + id: Optional[DatasetId] = None + """Model zoo (bioimage.io) wide, unique identifier (assigned by bioimage.io)""" + + parent: Optional[DatasetId] = None + """The description from which this one is derived""" + + source: Optional[HttpUrl] = None + """"URL to the source of the dataset.""" + + @model_validator(mode="before") + @classmethod + def _convert(cls, data: Dict[str, Any], /) -> Dict[str, Any]: + if ( + data.get("type") == "dataset" + and isinstance(fv := data.get("format_version"), str) + and fv.startswith("0.2.") + ): + old = DatasetDescr02.load(data) + if isinstance(old, InvalidDescr): + return data + + return cast( + Dict[str, Any], + (cls if TYPE_CHECKING else dict)( + attachments=( + [] + if old.attachments is None + else [FileDescr(source=f) for f in old.attachments.files] + ), + authors=[ + _author_conv.convert_as_dict(a) for a in old.authors + ], # pyright: ignore[reportArgumentType] + badges=old.badges, + cite=[ + {"text": c.text, "doi": c.doi, "url": c.url} for c in old.cite + ], # pyright: ignore[reportArgumentType] + config=old.config, + covers=old.covers, + description=old.description, + documentation=cast(DocumentationSource, old.documentation), + format_version="0.3.0", + git_repo=old.git_repo, # pyright: ignore[reportArgumentType] + icon=old.icon, + id=old.id, + license=old.license, # type: ignore + links=old.links, + maintainers=[ + _maintainer_conv.convert_as_dict(m) for m in old.maintainers + ], # pyright: ignore[reportArgumentType] + name=old.name, + source=old.source, + tags=old.tags, + type=old.type, + uploader=old.uploader, + version=old.version, + **(old.model_extra or {}), + ), + ) + + return data + + +class LinkedDataset(Node): + """Reference to a bioimage.io dataset.""" + + id: DatasetId + """A valid dataset `id` from the bioimage.io collection.""" + + version_number: int + """version number (n-th published version, not the semantic version) of linked dataset""" diff --git a/bioimageio/spec/exceptions.py b/bioimageio/spec/exceptions.py deleted file mode 100644 index 53bd6d1ae..000000000 --- a/bioimageio/spec/exceptions.py +++ /dev/null @@ -1,7 +0,0 @@ -from marshmallow import ValidationError - - -class UnconvertibleError(ValidationError): - """raised by .converters.maybe_convert()""" - - pass diff --git a/bioimageio/spec/generic/__init__.py b/bioimageio/spec/generic/__init__.py new file mode 100644 index 000000000..bd7c26b1e --- /dev/null +++ b/bioimageio/spec/generic/__init__.py @@ -0,0 +1,20 @@ +# autogen: start +""" +implementaions of all released minor versions are available in submodules: +- generic v0_2: `bioimageio.spec.generic.v0_2.GenericDescr` [user documentation](../../../user_docs/generic_descr_v0-2.md) +- generic v0_3: `bioimageio.spec.generic.v0_3.GenericDescr` [user documentation](../../../user_docs/generic_descr_v0-3.md) +""" +from typing import Union + +from pydantic import Discriminator +from typing_extensions import Annotated + +from .v0_2 import GenericDescr as GenericDescr_v0_2 +from .v0_3 import GenericDescr as GenericDescr +from .v0_3 import GenericDescr as GenericDescr_v0_3 + +AnyGenericDescr = Annotated[ + Union[GenericDescr_v0_2, GenericDescr_v0_3], Discriminator("format_version") +] +"""Union of any released generic desription""" +# autogen: stop diff --git a/bioimageio/spec/generic/_v0_2_converter.py b/bioimageio/spec/generic/_v0_2_converter.py new file mode 100644 index 000000000..d6d21c639 --- /dev/null +++ b/bioimageio/spec/generic/_v0_2_converter.py @@ -0,0 +1,116 @@ +import 
collections.abc +from typing import Any, Dict, Mapping, Union + +from .._internal.io import BioimageioYamlContent + + +def convert_from_older_format(data: BioimageioYamlContent) -> None: + """convert raw RDF data of an older format where possible""" + # check if we have future format version + if "format_version" not in data: + return + + fv = data["format_version"] + if isinstance(fv, str) and tuple(map(int, fv.split(".")[:2])) > (0, 2): + return + + # we unofficially accept strings as author entries + authors = data.get("authors") + if isinstance(authors, list): + data["authors"] = [{"name": a} if isinstance(a, str) else a for a in authors] + + if data.get("format_version") in ("0.2.0", "0.2.1"): + data["format_version"] = "0.2.2" + + if data.get("format_version") == "0.2.2": + remove_slashes_from_names(data) + data["format_version"] = "0.2.3" + + if data.get("format_version") == "0.2.3": + if isinstance(config := data.get("config"), dict) and isinstance( + bconfig := config.get("bioimageio"), dict + ): + if (nickname := bconfig.get("nickname")) is not None: + data["id"] = nickname + + if (nickname_icon := bconfig.get("nickname_icon")) is not None: + data["id_emoji"] = nickname_icon + + data["format_version"] = "0.2.4" + + remove_doi_prefix(data) + remove_gh_prefix(data) + + +def remove_slashes_from_names(data: Dict[Any, Any]) -> None: + NAME = "name" + if NAME in data and isinstance(data[NAME], str): + data[NAME] = data[NAME].replace("/", "").replace("\\", "") + + # update authors and maintainers + def rm_slashes_in_person_name( + person: Union[Any, Mapping[Union[Any, str], Any]], + ) -> Any: + if not isinstance(person, collections.abc.Mapping): + return person + + new_person = dict(person) + if isinstance(n := person.get(NAME), str): + new_person[NAME] = n.replace("/", "").replace("\\", "") + + return new_person + + for group in ("authors", "maintainers"): + persons = data.get(group) + if isinstance(persons, collections.abc.Sequence): + data[group] = [rm_slashes_in_person_name(p) for p in persons] # type: ignore + + +DOI_PREFIXES = ("https://doi.org/", "http://dx.doi.org/") + + +def remove_doi_prefix(data: BioimageioYamlContent) -> None: + """we unofficially accept DOIs starting with "https://doi.org/" here we remove this prefix""" + cite = data.get("cite") + if isinstance(cite, collections.abc.Sequence): + new_cite = list(cite) + for i in range(len(new_cite)): + cite_entry = new_cite[i] + if not isinstance(cite_entry, collections.abc.Mapping): + continue + + doi = cite_entry.get("doi") + if not isinstance(doi, str): + continue + + for doi_prefix in DOI_PREFIXES: + if doi.startswith(doi_prefix): + doi = doi[len(doi_prefix) :] + break + else: + continue + + new_cite_entry = dict(cite_entry) + new_cite_entry["doi"] = doi + new_cite[i] = new_cite_entry + + data["cite"] = new_cite + + +def remove_gh_prefix(data: BioimageioYamlContent) -> None: + def rm_gh(field_name: str): + authors = data.get(field_name) + if not isinstance(authors, list): + return + + for a in authors: + if ( + isinstance(a, dict) + and "github_user" in a + and isinstance(a["github_user"], str) + and a["github_user"].startswith("https://github.com/") + ): + a["github_user"] = a["github_user"][len("https://github.com/") :] + + rm_gh("authors") + rm_gh("maintainers") diff --git a/bioimageio/spec/generic/_v0_3_converter.py b/bioimageio/spec/generic/_v0_3_converter.py new file mode 100644 index 000000000..de095a297 --- /dev/null +++ b/bioimageio/spec/generic/_v0_3_converter.py @@ -0,0 +1,31 @@ +import collections.abc + +from 
.._internal.io import BioimageioYamlContent
+from ._v0_2_converter import convert_from_older_format as convert_from_older_format_v0_2
+
+
+def convert_from_older_format(data: BioimageioYamlContent) -> None:
+    """convert raw RDF data of an older format where possible"""
+    # check if we have future format version
+    fv = data.get("format_version", "0.2.0")
+    if (
+        not isinstance(fv, str)
+        or fv.count(".") != 2
+        or tuple(map(int, fv.split(".")[:2])) > (0, 3)
+    ):
+        return
+
+    convert_from_older_format_v0_2(data)
+
+    convert_attachments(data)
+
+    _ = data.pop("download_url", None)
+    _ = data.pop("rdf_source", None)
+
+    data["format_version"] = "0.3.0"
+
+
+def convert_attachments(data: BioimageioYamlContent) -> None:
+    a = data.get("attachments")
+    if isinstance(a, collections.abc.Mapping):
+        data["attachments"] = tuple({"source": file} for file in a.get("files", []))  # type: ignore
diff --git a/bioimageio/spec/generic/v0_2.py b/bioimageio/spec/generic/v0_2.py
new file mode 100644
index 000000000..b678b1364
--- /dev/null
+++ b/bioimageio/spec/generic/v0_2.py
@@ -0,0 +1,432 @@
+import collections.abc
+from typing import (
+    Any,
+    Dict,
+    List,
+    Literal,
+    Mapping,
+    Optional,
+    Sequence,
+    TypeVar,
+    Union,
+)
+
+from annotated_types import Len, LowerCase, MaxLen
+from pydantic import EmailStr, Field, ValidationInfo, field_validator, model_validator
+from typing_extensions import Annotated, Self, assert_never
+
+from .._internal.common_nodes import Node, ResourceDescrBase
+from .._internal.constants import TAG_CATEGORIES
+from .._internal.field_validation import (
+    AfterValidator as _AfterValidator,
+)
+from .._internal.field_warning import as_warning, issue_warning, warn
+from .._internal.io import (
+    BioimageioYamlContent,
+    WithSuffix,
+    YamlValue,
+    include_in_package_serializer,
+)
+from .._internal.io_basics import AbsoluteFilePath as AbsoluteFilePath
+from .._internal.types import (
+    DeprecatedLicenseId,
+    FileSource,
+    ImportantFileSource,
+    LicenseId,
+    NotEmpty,
+)
+from .._internal.types import Doi as Doi
+from .._internal.types import OrcidId as OrcidId
+from .._internal.types import RelativeFilePath as RelativeFilePath
+from .._internal.types import ResourceId as ResourceId
+from .._internal.url import HttpUrl as HttpUrl
+from .._internal.version_type import Version as Version
+from ._v0_2_converter import convert_from_older_format as _convert_from_older_format
+
+KNOWN_SPECIFIC_RESOURCE_TYPES = (
+    "application",
+    "collection",
+    "dataset",
+    "model",
+    "notebook",
+)
+
+VALID_COVER_IMAGE_EXTENSIONS = (
+    ".gif",
+    ".jpeg",
+    ".jpg",
+    ".png",
+    ".svg",
+)
+
+_WithImageSuffix = WithSuffix(VALID_COVER_IMAGE_EXTENSIONS, case_sensitive=False)
+CoverImageSource = Annotated[
+    Union[HttpUrl, AbsoluteFilePath, RelativeFilePath],
+    _WithImageSuffix,
+    include_in_package_serializer,
+]
+
+
+class AttachmentsDescr(Node):
+    model_config = {**Node.model_config, "extra": "allow"}
+    """update pydantic model config to allow additional unknown keys"""
+    files: List[ImportantFileSource] = Field(default_factory=list)
+    """∈📦 File attachments"""
+
+
+def _remove_slashes(s: str):
+    return s.replace("/", "").replace("\\", "")
+
+
+class Uploader(Node):
+    email: EmailStr
+    """Email"""
+    name: Optional[Annotated[str, _AfterValidator(_remove_slashes)]] = None
+    """name"""
+
+
+class _Person(Node):
+    affiliation: Optional[str] = None
+    """Affiliation"""
+
+    email: Optional[EmailStr] = None
+    """Email"""
+
+    orcid: Annotated[Optional[OrcidId], Field(examples=["0000-0001-2345-6789"])] = None
+    """An [ORCID iD](https://support.orcid.org/hc/en-us/sections/360001495313-What-is-ORCID
+    ) in hyphenated groups of 4 digits, (and [valid](
+    https://support.orcid.org/hc/en-us/articles/360006897674-Structure-of-the-ORCID-Identifier
+    ) as per ISO 7064 11,2.)
+    """
+
+
+class Author(_Person):
+    name: Annotated[str, _AfterValidator(_remove_slashes)]
+    github_user: Optional[str] = None  # TODO: validate github_user
+
+
+class Maintainer(_Person):
+    name: Optional[Annotated[str, _AfterValidator(_remove_slashes)]] = None
+    github_user: str
+
+
+class BadgeDescr(Node, title="Custom badge"):
+    """A custom badge"""
+
+    label: Annotated[str, Field(examples=["Open in Colab"])]
+    """badge label to display on hover"""
+
+    icon: Annotated[
+        Union[HttpUrl, None],
+        Field(examples=["https://colab.research.google.com/assets/colab-badge.svg"]),
+    ] = None
+    """badge icon"""
+
+    url: Annotated[
+        HttpUrl,
+        Field(
+            examples=[
+                "https://colab.research.google.com/github/HenriquesLab/ZeroCostDL4Mic/blob/master/Colab_notebooks/U-net_2D_ZeroCostDL4Mic.ipynb"
+            ]
+        ),
+    ]
+    """target URL"""
+
+
+class CiteEntry(Node):
+    text: str
+    """free text description"""
+
+    doi: Optional[Doi] = None
+    """A digital object identifier (DOI) is the preferred citation reference.
+    See https://www.doi.org/ for details. (alternatively specify `url`)"""
+
+    @field_validator("doi", mode="before")
+    @classmethod
+    def accept_prefixed_doi(cls, doi: Any) -> Any:
+        if isinstance(doi, str):
+            for doi_prefix in ("https://doi.org/", "http://dx.doi.org/"):
+                if doi.startswith(doi_prefix):
+                    doi = doi[len(doi_prefix) :]
+                    break
+
+        return doi
+
+    url: Optional[str] = None
+    """URL to cite (preferably specify a `doi` instead)"""
+
+    @model_validator(mode="after")
+    def _check_doi_or_url(self) -> Self:
+        if not self.doi and not self.url:
+            raise ValueError("Either 'doi' or 'url' is required")
+
+        return self
+
+
+class LinkedResource(Node):
+    """Reference to a bioimage.io resource"""
+
+    id: ResourceId
+    """A valid resource `id` from the bioimage.io collection."""
+
+    version_number: Optional[int] = None
+    """version number (n-th published version, not the semantic version) of linked resource"""
+
+
+class GenericModelDescrBase(ResourceDescrBase):
+    """Base for all resource descriptions including of model descriptions"""
+
+    name: Annotated[NotEmpty[str], warn(MaxLen(128), "Longer than 128 characters.")]
+    """A human-friendly name of the resource description"""
+
+    description: str
+
+    covers: Annotated[
+        List[CoverImageSource],
+        Field(
+            examples=["cover.png"],
+            description=(
+                "Cover images. Please use an image smaller than 500KB and an aspect"
+                " ratio width to height of 2:1.\nThe supported image formats are:"
+                f" {VALID_COVER_IMAGE_EXTENSIONS}"
+            ),
+        ),
+    ] = Field(
+        default_factory=list,
+    )
+    """∈📦 Cover images.
+
+    id_emoji: Optional[Annotated[str, Len(min_length=1, max_length=1)]] = None
+    """UTF-8 emoji for display alongside the `id`."""
+
+    authors: List[Author] = Field(default_factory=list)
+    """The authors are the creators of the RDF and the primary points of contact."""
+
+    @field_validator("authors", mode="before")
+    @classmethod
+    def accept_author_strings(cls, authors: Union[Any, Sequence[Any]]) -> Any:
+        """we unofficially accept strings as author entries"""
+        if isinstance(authors, collections.abc.Sequence):
+            authors = [{"name": a} if isinstance(a, str) else a for a in authors]
+
+        if not authors:
+            issue_warning("No author specified.", value=authors)
+
+        return authors
+
+    attachments: Optional[AttachmentsDescr] = None
+    """file and other attachments"""
+
+    cite: List[CiteEntry] = Field(default_factory=list)
+    """citations"""
+
+    @field_validator("cite", mode="after")
+    @classmethod
+    def _warn_empty_cite(cls, value: Any):
+        if not value:
+            issue_warning("No cite entry specified.", value=value)
+
+        return value
+
+    config: Annotated[
+        Dict[str, YamlValue],
+        Field(
+            examples=[
+                dict(
+                    bioimageio={
+                        "my_custom_key": 3837283,
+                        "another_key": {"nested": "value"},
+                    },
+                    imagej={"macro_dir": "path/to/macro/file"},
+                )
+            ],
+        ),
+    ] = Field(default_factory=dict)
+    """A field for custom configuration that can contain any keys not present in the RDF spec.
+    This means you should not store, for example, a GitHub repo URL in `config` since we already have the
+    `git_repo` field defined in the spec.
+    Keys in `config` may be very specific to a tool or consumer software. To avoid conflicting definitions,
+    it is recommended to wrap added configuration into a sub-field named with the specific domain or tool name,
+    for example:
+    ```yaml
+    config:
+        bioimageio:  # here is the domain name
+            my_custom_key: 3837283
+            another_key:
+                nested: value
+        imagej:  # config specific to ImageJ
+            macro_dir: path/to/macro/file
+    ```
+    If possible, please use [`snake_case`](https://en.wikipedia.org/wiki/Snake_case) for keys in `config`.
+    You may want to list linked files additionally under `attachments` to include them when packaging a resource.
+    (Packaging a resource means downloading/copying important linked files and creating a ZIP archive that contains
+    an altered rdf.yaml file with local references to the downloaded files.)"""
+
+    download_url: Optional[HttpUrl] = None
+    """URL to download the resource from (deprecated)"""
+
+    git_repo: Annotated[
+        Optional[str],
+        Field(
+            examples=[
+                "https://github.com/bioimage-io/spec-bioimage-io/tree/main/example_specs/models/unet2d_nuclei_broad"
+            ],
+        ),
+    ] = None
+    """A URL to the Git repository where the resource is being developed."""
+
+    icon: Union[
+        ImportantFileSource, Annotated[str, Len(min_length=1, max_length=2)], None
+    ] = None
+    """An icon for illustration"""
+
+    links: Annotated[
+        List[str],
+        Field(
+            examples=[
+                (
+                    "ilastik/ilastik",
+                    "deepimagej/deepimagej",
+                    "zero/notebook_u-net_3d_zerocostdl4mic",
+                )
+            ],
+        ),
+    ] = Field(default_factory=list)
+    """IDs of other bioimage.io resources"""
+
+    uploader: Optional[Uploader] = None
+    """The person who uploaded the model (e.g. to bioimage.io)"""
+
+    maintainers: List[Maintainer] = Field(default_factory=list)
+    """Maintainers of this resource.
+    If not specified, `authors` are maintainers and at least some of them should specify their `github_user` name"""
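The `accept_author_strings` hook above is easiest to see in action. A hedged sketch, using the `GenericDescr` class defined at the end of this file (further validation warnings, e.g. about the empty `cite` list, may also be emitted):

```python
from bioimageio.spec.generic.v0_2 import GenericDescr  # assumed import path

descr = GenericDescr(
    name="example resource",
    description="demonstrates unofficially accepted string author entries",
    authors=["Jane Doe"],  # plain string, coerced to {"name": "Jane Doe"}
)
assert descr.authors[0].name == "Jane Doe"
```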
+
+    rdf_source: Optional[FileSource] = None
+    """Resource description file (RDF) source; used to keep track of where an rdf.yaml was loaded from.
+    Do not set this field in a YAML file."""
+
+    tags: Annotated[
+        List[str],
+        Field(examples=[("unet2d", "pytorch", "nucleus", "segmentation", "dsb2018")]),
+    ] = Field(default_factory=list)
+    """Associated tags"""
+
+    @as_warning
+    @field_validator("tags")
+    @classmethod
+    def warn_about_tag_categories(
+        cls, value: List[str], info: ValidationInfo
+    ) -> List[str]:
+        categories = TAG_CATEGORIES.get(info.data["type"], {})
+        missing_categories: List[Mapping[str, Sequence[str]]] = []
+        for cat, entries in categories.items():
+            if not any(e in value for e in entries):
+                missing_categories.append({cat: entries})
+
+        if missing_categories:
+            raise ValueError(
+                f"Missing tags from bioimage.io categories: {missing_categories}"
+            )
+
+        return value
+
+    version: Optional[Version] = None
+    """The version of the resource following SemVer 2.0."""
+
+    version_number: Optional[int] = None
+    """version number (n-th published version, not the semantic version)"""
+
+
+class GenericDescrBase(GenericModelDescrBase):
+    """Base for all resource descriptions except for the model descriptions"""
+
+    format_version: Literal["0.2.4"] = "0.2.4"
+    """The format version of this resource specification
+    (not the `version` of the resource description).
+    When creating a new resource, always use the latest micro/patch version described here.
+    The `format_version` is important for any consumer software to understand how to parse the fields.
+    """
+
+    @model_validator(mode="before")
+    @classmethod
+    def _convert_from_older_format(
+        cls, data: BioimageioYamlContent, /
+    ) -> BioimageioYamlContent:
+        _convert_from_older_format(data)
+        return data
+
+    badges: List[BadgeDescr] = Field(default_factory=list)
+    """badges associated with this resource"""
+
+    documentation: Annotated[
+        Optional[ImportantFileSource],
+        Field(
+            examples=[
+                "https://raw.githubusercontent.com/bioimage-io/spec-bioimage-io/main/example_specs/models/unet2d_nuclei_broad/README.md",
+                "README.md",
+            ],
+        ),
+    ] = None
+    """∈📦 URL or relative path to a markdown file with additional documentation.
+    The recommended documentation file name is `README.md`. An `.md` suffix is mandatory."""
+
+    license: Annotated[
+        Optional[Union[LicenseId, DeprecatedLicenseId, str]],
+        Field(examples=["CC-BY-4.0", "MIT", "BSD-2-Clause"]),
+    ] = None
+    """A [SPDX license identifier](https://spdx.org/licenses/).
+    We do not support custom licenses beyond the SPDX license list; if you need that, please
+    [open a GitHub issue](https://github.com/bioimage-io/spec-bioimage-io/issues/new/choose
+    ) to discuss your intentions with the community."""
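A sketch of how example `license` values map onto the branches of the `deprecated_spdx_license` validator that follows (hedged; whether a warning surfaces as a log entry or an error depends on the validation context, and "AGPL-1.0" is assumed to be on the deprecated SPDX list):

```python
# "CC-BY-4.0"      -> parses as LicenseId: accepted silently
# "AGPL-1.0"       -> DeprecatedLicenseId: "... is a deprecated license identifier."
# "my-own-license" -> plain str: "... is an unknown license identifier."
# None             -> "missing license." warning
```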
+
+    @field_validator("license", mode="after")
+    @classmethod
+    def deprecated_spdx_license(
+        cls, value: Optional[Union[LicenseId, DeprecatedLicenseId, str]]
+    ):
+        if isinstance(value, LicenseId):
+            pass
+        elif value is None:
+            issue_warning("missing license.", value=value)
+        elif isinstance(value, DeprecatedLicenseId):
+            issue_warning("'{value}' is a deprecated license identifier.", value=value)
+        elif isinstance(value, str):
+            issue_warning("'{value}' is an unknown license identifier.", value=value)
+        else:
+            assert_never(value)
+
+        return value
+
+
+ResourceDescrType = TypeVar("ResourceDescrType", bound=GenericDescrBase)
+
+
+class GenericDescr(
+    GenericDescrBase, extra="ignore", title="bioimage.io generic specification"
+):
+    """Specification of the fields used in a generic bioimage.io-compliant resource description file (RDF).
+
+    An RDF is a YAML file that describes a resource such as a model, a dataset, or a notebook.
+    Note that those resources are described with a type-specific RDF.
+    Use this generic resource description if none of the known specific types matches your resource.
+    """
+
+    type: Annotated[str, LowerCase, Field(frozen=True)] = "generic"
+    """The resource type assigns a broad category to the resource."""
+
+    id: Optional[ResourceId] = None
+    """Model zoo (bioimage.io) wide, unique identifier (assigned by bioimage.io)"""
+
+    source: Optional[HttpUrl] = None
+    """The primary source of the resource"""
+
+    @field_validator("type", mode="after")
+    @classmethod
+    def check_specific_types(cls, value: str) -> str:
+        if value in KNOWN_SPECIFIC_RESOURCE_TYPES:
+            raise ValueError(
+                f"Use the {value} description instead of this generic description for"
+                + f" your '{value}' resource."
+            )
+
+        return value
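And a sketch of the `check_specific_types` guard above (hedged; pydantic wraps the `ValueError` in a `ValidationError`):

```python
from pydantic import ValidationError

from bioimageio.spec.generic.v0_2 import GenericDescr  # assumed import path

# accepted: "figure" is not in KNOWN_SPECIFIC_RESOURCE_TYPES
_ = GenericDescr(name="ok", description="a generic resource", type="figure")

try:
    _ = GenericDescr(name="not ok", description="a model in disguise", type="model")
except ValidationError:
    pass  # "Use the model description instead of this generic description ..."
```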
diff --git a/bioimageio/spec/generic/v0_3.py b/bioimageio/spec/generic/v0_3.py
new file mode 100644
index 000000000..7d4bdc25d
--- /dev/null
+++ b/bioimageio/spec/generic/v0_3.py
@@ -0,0 +1,388 @@
+from __future__ import annotations
+
+from functools import partial
+from typing import Any, Dict, List, Literal, Optional, Sequence, TypeVar, Union
+
+from annotated_types import Len, LowerCase, MaxLen
+from pydantic import Field, ValidationInfo, field_validator, model_validator
+from typing_extensions import Annotated
+
+from bioimageio.spec._internal.field_validation import validate_gh_user
+
+from .._internal.common_nodes import (
+    Converter,
+    Node,
+    ResourceDescrBase,
+)
+from .._internal.constants import (
+    TAG_CATEGORIES,
+)
+from .._internal.field_validation import AfterValidator, Predicate
+from .._internal.field_warning import as_warning, warn
+from .._internal.io import (
+    BioimageioYamlContent,
+    V_suffix,
+    YamlValue,
+    include_in_package_serializer,
+    validate_suffix,
+)
+from .._internal.io import FileDescr as FileDescr
+from .._internal.io import Sha256 as Sha256
+from .._internal.io_basics import AbsoluteFilePath
+from .._internal.license_id import LicenseId
+from .._internal.types import (
+    DeprecatedLicenseId,
+    ImportantFileSource,
+    NotEmpty,
+)
+from .._internal.types import RelativeFilePath as RelativeFilePath
+from .._internal.types import ResourceId as ResourceId
+from .._internal.url import HttpUrl as HttpUrl
+from .._internal.version_type import Version as Version
+from .._internal.warning_levels import ALERT
+from ._v0_3_converter import convert_from_older_format
+from .v0_2 import VALID_COVER_IMAGE_EXTENSIONS, CoverImageSource
+from .v0_2 import Author as _Author_v0_2
+from .v0_2 import BadgeDescr as BadgeDescr
+from .v0_2 import Doi as Doi
+from .v0_2 import Maintainer as _Maintainer_v0_2
+from .v0_2 import OrcidId as OrcidId
+from .v0_2 import Uploader as Uploader
+
+KNOWN_SPECIFIC_RESOURCE_TYPES = (
+    "application",
+    "collection",
+    "dataset",
+    "model",
+    "notebook",
+)
+
+
+def _validate_md_suffix(value: V_suffix) -> V_suffix:
+    return validate_suffix(value, suffix=".md", case_sensitive=True)
+
+
+DocumentationSource = Annotated[
+    Union[AbsoluteFilePath, RelativeFilePath, HttpUrl],
+    AfterValidator(_validate_md_suffix),
+    include_in_package_serializer,
+]
+
+
+def _has_no_slash(s: str) -> bool:
+    return "/" not in s and "\\" not in s
+
+
+class Author(_Author_v0_2):
+    name: Annotated[str, Predicate(_has_no_slash)]
+    github_user: Optional[str] = None
+
+    @field_validator("github_user", mode="after")
+    def _validate_gh_user(cls, value: Optional[str]):
+        if value is None:
+            return None
+        else:
+            return validate_gh_user(value)
+
+
+class _AuthorConv(Converter[_Author_v0_2, Author]):
+    def _convert(
+        self, src: _Author_v0_2, tgt: "type[Author] | type[dict[str, Any]]"
+    ) -> "Author | dict[str, Any]":
+        return tgt(
+            name=src.name,
+            github_user=src.github_user,
+            affiliation=src.affiliation,
+            email=src.email,
+            orcid=src.orcid,
+        )
+
+
+_author_conv = _AuthorConv(_Author_v0_2, Author)
+
+
+class Maintainer(_Maintainer_v0_2):
+    name: Optional[Annotated[str, Predicate(_has_no_slash)]] = None
+    github_user: str
+
+    @field_validator("github_user", mode="after")
+    def validate_gh_user(cls, value: str):
+        return validate_gh_user(value)
+
+
+class _MaintainerConv(Converter[_Maintainer_v0_2, Maintainer]):
+    def _convert(
+        self, src: _Maintainer_v0_2, tgt: "type[Maintainer | dict[str, Any]]"
+    ) -> "Maintainer | dict[str, Any]":
+        return tgt(
+            name=src.name,
+            github_user=src.github_user,
+            affiliation=src.affiliation,
+            email=src.email,
+            orcid=src.orcid,
+        )
+
+
+_maintainer_conv = _MaintainerConv(_Maintainer_v0_2, Maintainer)
+
+
+class CiteEntry(Node):
+    text: str
+    """free text description"""
+
+    doi: Optional[Doi] = None
+    """A digital object identifier (DOI) is the preferred citation reference.
+    See https://www.doi.org/ for details. (alternatively specify `url`)"""
+
+    url: Optional[HttpUrl] = None
+    """URL to cite (preferably specify a `doi` instead)"""
+
+    @model_validator(mode="after")
+    def _check_doi_or_url(self):
+        if not self.doi and not self.url:
+            raise ValueError("Either 'doi' or 'url' is required")
+
+        return self
+
+
+class LinkedResource(Node):
+    """Reference to a bioimage.io resource"""
+
+    id: ResourceId
+    """A valid resource `id` from the official bioimage.io collection."""
+
+    version_number: int
+    """version number (n-th published version, not the semantic version) of linked resource"""
+
+
+class GenericModelDescrBase(ResourceDescrBase):
+    """Base for all resource descriptions, including model descriptions"""
+
+    name: Annotated[NotEmpty[str], MaxLen(128)]
+    """A human-friendly name of the resource description"""
+
+    description: Annotated[
+        str, MaxLen(1024), warn(MaxLen(512), "Description longer than 512 characters.")
+    ]
+    """A string containing a brief description."""
+
+    covers: Annotated[
+        List[CoverImageSource],
+        Field(
+            examples=[],
+            description=(
+                "Cover images. Please use an image smaller than 500KB and an aspect"
+                " ratio width to height of 2:1 or 1:1.\nThe supported image formats"
+                f" are: {VALID_COVER_IMAGE_EXTENSIONS}"
+            ),
+        ),
+    ] = Field(default_factory=list)
+    """∈📦 Cover images."""
+
+    id_emoji: Optional[Annotated[str, Len(min_length=1, max_length=1)]] = None
+    """UTF-8 emoji for display alongside the `id`."""
+
+    authors: NotEmpty[List[Author]]
+    """The authors are the creators of this resource description and the primary points of contact."""
+
+    attachments: List[FileDescr] = Field(default_factory=list)
+    """file attachments"""
+
+    cite: NotEmpty[List[CiteEntry]]
+    """citations"""
+
+    license: Annotated[
+        Union[LicenseId, DeprecatedLicenseId],
+        warn(
+            LicenseId,
+            "{value} is deprecated, see https://spdx.org/licenses/{value}.html",
+        ),
+        Field(examples=["CC-BY-4.0", "MIT", "BSD-2-Clause"]),
+    ]
+    """A [SPDX license identifier](https://spdx.org/licenses/).
+    We do not support custom licenses beyond the SPDX license list; if you need that, please
+    [open a GitHub issue](https://github.com/bioimage-io/spec-bioimage-io/issues/new/choose)
+    to discuss your intentions with the community."""
+
+    config: Annotated[
+        Dict[str, YamlValue],
+        Field(
+            examples=[
+                dict(
+                    bioimageio={
+                        "my_custom_key": 3837283,
+                        "another_key": {"nested": "value"},
+                    },
+                    imagej={"macro_dir": "path/to/macro/file"},
+                )
+            ],
+        ),
+    ] = Field(default_factory=dict)
+    """A field for custom configuration that can contain any keys not present in the RDF spec.
+    This means you should not store, for example, a GitHub repo URL in `config` since there is a `git_repo` field.
+    Keys in `config` may be very specific to a tool or consumer software.
+    To avoid conflicting definitions,
+    it is recommended to wrap added configuration into a sub-field named with the specific domain or tool name,
+    for example:
+    ```yaml
+    config:
+        bioimageio:  # here is the domain name
+            my_custom_key: 3837283
+            another_key:
+                nested: value
+        imagej:  # config specific to ImageJ
+            macro_dir: path/to/macro/file
+    ```
+    If possible, please use [`snake_case`](https://en.wikipedia.org/wiki/Snake_case) for keys in `config`.
+    You may want to list linked files additionally under `attachments` to include them when packaging a resource.
+    (Packaging a resource means downloading/copying important linked files and creating a ZIP archive that contains
+    an altered rdf.yaml file with local references to the downloaded files.)"""
+
+    git_repo: Annotated[
+        Optional[HttpUrl],
+        Field(
+            examples=[
+                "https://github.com/bioimage-io/spec-bioimage-io/tree/main/example_specs/models/unet2d_nuclei_broad"
+            ],
+        ),
+    ] = None
+    """A URL to the Git repository where the resource is being developed."""
+
+    icon: Union[
+        ImportantFileSource, Annotated[str, Len(min_length=1, max_length=2)], None
+    ] = None
+    """An icon for illustration, e.g. on bioimage.io"""
+
+    links: Annotated[
+        List[str],
+        Field(
+            examples=[
+                (
+                    "ilastik/ilastik",
+                    "deepimagej/deepimagej",
+                    "zero/notebook_u-net_3d_zerocostdl4mic",
+                )
+            ],
+        ),
+    ] = Field(default_factory=list)
+    """IDs of other bioimage.io resources"""
+
+    uploader: Optional[Uploader] = None
+    """The person who uploaded the model (e.g. to bioimage.io)"""
+
+    maintainers: List[Maintainer] = Field(default_factory=list)
+    """Maintainers of this resource.
+    If not specified, `authors` are maintainers and at least some of them have to specify their `github_user` name"""
+
+    @partial(as_warning, severity=ALERT)
+    @field_validator("maintainers", mode="after")
+    @classmethod
+    def check_maintainers_exist(
+        cls, maintainers: List[Maintainer], info: ValidationInfo
+    ) -> List[Maintainer]:
+        if not maintainers and "authors" in info.data:
+            authors: List[Author] = info.data["authors"]
+            if all(a.github_user is None for a in authors):
+                raise ValueError(
+                    "Missing `maintainers` or any author in `authors` with a specified"
+                    + " `github_user` name."
+                )
+
+        return maintainers
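A sketch of when `check_maintainers_exist` above fires (hedged; it is wrapped with `as_warning` at severity `ALERT`, so it surfaces as a warning rather than a hard validation error):

```python
# Assuming an otherwise valid v0.3 resource description:
# maintainers=[Maintainer(github_user="jane")]        -> no warning
# maintainers=[], some author sets github_user        -> no warning
# maintainers=[], no author specifies a github_user   -> ALERT: "Missing `maintainers` or any
#                                                        author in `authors` with a specified
#                                                        `github_user` name."
```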
+
+    tags: Annotated[
+        List[str],
+        Field(examples=[("unet2d", "pytorch", "nucleus", "segmentation", "dsb2018")]),
+    ] = Field(default_factory=list)
+    """Associated tags"""
+
+    @as_warning
+    @field_validator("tags")
+    @classmethod
+    def warn_about_tag_categories(
+        cls, value: List[str], info: ValidationInfo
+    ) -> List[str]:
+        categories = TAG_CATEGORIES.get(info.data["type"], {})
+        missing_categories: List[Dict[str, Sequence[str]]] = []
+        for cat, entries in categories.items():
+            if not any(e in value for e in entries):
+                missing_categories.append({cat: entries})
+
+        if missing_categories:
+            raise ValueError(
+                f"Missing tags from bioimage.io categories: {missing_categories}"
+            )
+
+        return value
+
+    version: Optional[Version] = None
+    """The version of the resource following SemVer 2.0."""
+
+    version_number: Optional[int] = None
+    """version number (n-th published version, not the semantic version)"""
+
+
+class GenericDescrBase(GenericModelDescrBase):
+    """Base for all resource descriptions except for the model descriptions"""
+
+    format_version: Literal["0.3.0"] = "0.3.0"
+    """The **format** version of this resource specification"""
+
+    @model_validator(mode="before")
+    @classmethod
+    def _convert_from_older_format(
+        cls, data: BioimageioYamlContent, /
+    ) -> BioimageioYamlContent:
+        convert_from_older_format(data)
+        return data
+
+    documentation: Annotated[
+        Optional[DocumentationSource],
+        Field(
+            examples=[
+                "https://raw.githubusercontent.com/bioimage-io/spec-bioimage-io/main/example_specs/models/unet2d_nuclei_broad/README.md",
+                "README.md",
+            ],
+        ),
+    ] = None
+    """∈📦 URL or relative path to a markdown file with additional documentation.
+    The recommended documentation file name is `README.md`. An `.md` suffix is mandatory."""
+
+    badges: List[BadgeDescr] = Field(default_factory=list)
+    """badges associated with this resource"""
+
+
+ResourceDescrType = TypeVar("ResourceDescrType", bound=GenericDescrBase)
+
+
+class GenericDescr(
+    GenericDescrBase, extra="ignore", title="bioimage.io generic specification"
+):
+    """Specification of the fields used in a generic bioimage.io-compliant resource description file (RDF).
+
+    An RDF is a YAML file that describes a resource such as a model, a dataset, or a notebook.
+    Note that those resources are described with a type-specific RDF.
+    Use this generic resource description if none of the known specific types matches your resource.
+    """
+
+    type: Annotated[str, LowerCase] = Field("generic", frozen=True)
+    """The resource type assigns a broad category to the resource."""
+
+    id: Optional[ResourceId] = None
+    """Model zoo (bioimage.io) wide, unique identifier (assigned by bioimage.io)"""
+
+    parent: Optional[ResourceId] = None
+    """The description from which this one is derived"""
+
+    source: Optional[HttpUrl] = None
+    """The primary source of the resource"""
+
+    @field_validator("type", mode="after")
+    @classmethod
+    def check_specific_types(cls, value: str) -> str:
+        if value in KNOWN_SPECIFIC_RESOURCE_TYPES:
+            raise ValueError(
+                f"Use the {value} description instead of this generic description for"
+                + f" your '{value}' resource."
+            )
+
+        return value
diff --git a/bioimageio/spec/io_.py b/bioimageio/spec/io_.py
deleted file mode 100644
index 1ff3363b5..000000000
--- a/bioimageio/spec/io_.py
+++ /dev/null
@@ -1,349 +0,0 @@
-"""simple io functionality to deserialize a resource description from a resource description file (RDF)
-(in form of a dict, e.g.
from yaml.load('rdf.yaml') to a raw_nodes.ResourceDescription raw node, -which is a python dataclass -""" -import os -import pathlib -import warnings -import zipfile -from hashlib import sha256 -from io import StringIO -from tempfile import TemporaryDirectory -from types import ModuleType -from typing import Dict, IO, Optional, Sequence, Tuple, Union - -from marshmallow import ValidationError, missing -from packaging.version import Version - -from bioimageio.spec.shared import RDF_NAMES, raw_nodes, resolve_rdf_source, resolve_rdf_source_and_type, resolve_source -from bioimageio.spec.shared.common import ( - BIOIMAGEIO_CACHE_PATH, - BIOIMAGEIO_USE_CACHE, - get_class_name_from_type, - get_format_version_module, - get_latest_format_version, - get_latest_format_version_module, - no_cache_tmp_list, - yaml, -) -from bioimageio.spec.shared.node_transformer import ( - AbsoluteToRelativePathTransformer, - GenericRawNode, - GenericRawRD, - RawNodePackageTransformer, - RelativePathTransformer, -) -from bioimageio.spec.shared.raw_nodes import ResourceDescription as RawResourceDescription -from bioimageio.spec.shared.schema import SharedBioImageIOSchema - -try: - from typing import Protocol -except ImportError: - from typing_extensions import Protocol # type: ignore - - -LATEST = "latest" - - -class ConvertersModule(Protocol): - def maybe_convert(self, data: dict) -> dict: - raise NotImplementedError - - -class SubModuleUtils(Protocol): - def filter_resource_description(self, raw_rd: GenericRawNode, **kwargs) -> GenericRawNode: - raise NotImplementedError - - -class SpecSubmodule(Protocol): - format_version: str - - converters: ConvertersModule - nodes: ModuleType - raw_nodes: ModuleType - schema: ModuleType - utils: SubModuleUtils - - -def _get_spec_submodule(type_: str, data_version: str = LATEST) -> SpecSubmodule: - if not isinstance(data_version, str): - raise TypeError(f"invalid 'format_version' {data_version}") - - if data_version == LATEST: - sub_spec = get_latest_format_version_module(type_) - else: - sub_spec = get_format_version_module(type_, data_version) - - return sub_spec - - -def extract_resource_package( - source: Union[os.PathLike, IO, str, bytes, raw_nodes.URI] -) -> Tuple[dict, str, pathlib.Path]: - """extract a zip source to BIOIMAGEIO_CACHE_PATH""" - src, source_name, root = resolve_rdf_source(source) - if isinstance(root, bytes): - raise NotImplementedError("package source was bytes") - - if BIOIMAGEIO_USE_CACHE: - package_path = BIOIMAGEIO_CACHE_PATH / "extracted_packages" / sha256(str(root).encode("utf-8")).hexdigest() - package_path.mkdir(exist_ok=True, parents=True) - else: - tmp_dir = TemporaryDirectory() - no_cache_tmp_list.append(tmp_dir) - package_path = pathlib.Path(tmp_dir.name) - - if isinstance(root, raw_nodes.URI): - for rdf_name in RDF_NAMES: - if (package_path / rdf_name).exists(): - download = None - break - else: - download = resolve_source(root) - - local_source = download - else: - download = None - local_source = root - - if local_source is not None: - with zipfile.ZipFile(local_source) as zf: - zf.extractall(package_path) - - for rdf_name in RDF_NAMES: - if (package_path / rdf_name).exists(): - break - else: - raise FileNotFoundError(f"Missing 'rdf.yaml' in {root} extracted from {download}") - - if download is not None: - try: - os.remove(download) - except Exception as e: - warnings.warn(f"Could not remove download {download} due to {e}") - - assert isinstance(package_path, pathlib.Path) - return src, source_name, package_path - - -def 
load_raw_resource_description( - source: Union[dict, os.PathLike, IO, str, bytes, raw_nodes.URI, RawResourceDescription], - update_to_format: Optional[str] = None, -) -> RawResourceDescription: - """load a raw python representation from a BioImage.IO resource description. - Use `bioimageio.core.load_resource_description` for a more convenient representation of the resource. - and `bioimageio.core.load_raw_resource_description` to ensure the 'root_path' attribute of the returned object is - a local file path. - - Args: - source: resource description or resource description file (RDF) - update_to_format: update resource to specific major.minor format version; ignoring patch version. - Returns: - raw BioImage.IO resource - """ - root = None - if isinstance(source, RawResourceDescription): - if update_to_format == "latest": - update_to_format = get_latest_format_version(source.type) - - if update_to_format is not None and source.format_version != update_to_format: - # do serialization round-trip to account for 'update_to_format' but keep root_path - root = source.root_path - source = serialize_raw_resource_description_to_dict(source) - else: - return source - - data, source_name, _root, type_ = resolve_rdf_source_and_type(source) - if root is None: - root = _root - - class_name = get_class_name_from_type(type_) - - # determine submodule's format version - original_data_version = data.get("format_version") - if original_data_version is None: - odv: Optional[Version] = None - else: - try: - odv = Version(original_data_version) - except Exception as e: - raise ValueError(f"Invalid format version {original_data_version}.") from e - - if update_to_format is None: - data_version = original_data_version or LATEST - elif update_to_format == LATEST: - data_version = LATEST - else: - data_version = ".".join(update_to_format.split("."[:2])) - if update_to_format.count(".") > 1: - warnings.warn( - f"Ignoring patch version of update_to_format {update_to_format} " - f"(always updating to latest patch version)." - ) - - try: - sub_spec = _get_spec_submodule(type_, data_version) - except ValueError as e: - if odv is None: - raise e # raise original error; no second attempt with 'LATEST' format version - - try: - # load latest spec submodule - sub_spec = _get_spec_submodule(type_, data_version=LATEST) - except ValueError: - raise e # raise original error with desired data_version - - if odv <= Version(sub_spec.format_version): - # original format version is not a future version. - # => we should not fall back to latest format version. - # => 'format_version' may be invalid or the issue lies with 'type_'... - raise e - - downgrade_format_version = odv and Version(sub_spec.format_version) < odv - if downgrade_format_version: - warnings.warn( - f"Loading future {type_} format version {original_data_version} as (latest known) " - f"{sub_spec.format_version}." 
- ) - data["format_version"] = sub_spec.format_version # set format_version to latest available - - # save original format version under config:bioimageio:original_format_version for reference - if "config" not in data: - data["config"] = {} - - if "bioimageio" not in data["config"]: - data["config"]["bioimageio"] = {} - - data["config"]["bioimageio"]["original_format_version"] = original_data_version - - schema: SharedBioImageIOSchema = getattr(sub_spec.schema, class_name)() - - data = sub_spec.converters.maybe_convert(data) - try: - raw_rd = schema.load(data) - except ValidationError as e: - if downgrade_format_version: - e.messages["format_version"] = ( - f"Other errors may be caused by a possibly incompatible future format version {original_data_version} " - f"treated as {sub_spec.format_version}." - ) - - raise e - - if isinstance(root, pathlib.Path): - root = root.resolve() - if zipfile.is_zipfile(root): - # set root to extracted zip package - _, _, root = extract_resource_package(root) - elif isinstance(root, bytes): - root = pathlib.Path().resolve() - - raw_rd.root_path = root - raw_rd = RelativePathTransformer(root=root).transform(raw_rd) - - return raw_rd - - -def serialize_raw_resource_description_to_dict( - raw_rd: RawResourceDescription, convert_absolute_paths: bool = False -) -> dict: - """serialize a raw nodes resource description to a dict with the content of a resource description file (RDF). - If 'convert_absolute_paths' all absolute paths are converted to paths relative to raw_rd.root_path before - serialization. - """ - class_name = get_class_name_from_type(raw_rd.type) - sub_spec = _get_spec_submodule(raw_rd.type, raw_rd.format_version) - schema: SharedBioImageIOSchema = getattr(sub_spec.schema, class_name)() - - if convert_absolute_paths: - raw_rd = AbsoluteToRelativePathTransformer(root=raw_rd.root_path).transform(raw_rd) - - serialized = schema.dump(raw_rd) - assert isinstance(serialized, dict) - assert missing not in serialized.values() - - return serialized - - -def serialize_raw_resource_description(raw_rd: RawResourceDescription, convert_absolute_paths: bool = True) -> str: - if yaml is None: - raise RuntimeError("'serialize_raw_resource_description' requires yaml") - - serialized = serialize_raw_resource_description_to_dict(raw_rd, convert_absolute_paths=convert_absolute_paths) - - with StringIO() as stream: - yaml.dump(serialized, stream) - return stream.getvalue() - - -def save_raw_resource_description(raw_rd: RawResourceDescription, path: pathlib.Path): - if yaml is None: - raise RuntimeError("'save_raw_resource_description' requires yaml") - - warnings.warn("only saving serialized rdf, no associated resources.") - if path.suffix != ".yaml": - warnings.warn("saving with '.yaml' suffix is strongly encouraged.") - - serialized = serialize_raw_resource_description_to_dict(raw_rd) - yaml.dump(serialized, path) - - -def get_resource_package_content_wo_rdf( - raw_rd: Union[GenericRawRD, raw_nodes.URI, str, pathlib.Path], - *, - weights_priority_order: Optional[Sequence[str]] = None, # model only -) -> Tuple[raw_nodes.ResourceDescription, Dict[str, Union[pathlib.PurePath, raw_nodes.URI]]]: - """ - Args: - raw_rd: raw resource description - # for model resources only: - weights_priority_order: If given only the first weights format present in the model is included. - If none of the prioritized weights formats is found all are included. 
- - Returns: - Tuple of updated raw resource description and package content of remote URIs, local file paths or text content - keyed by file names. - Important note: the serialized rdf.yaml is not included. - """ - if isinstance(raw_rd, raw_nodes.ResourceDescription): - r_rd = raw_rd - else: - r_rd = load_raw_resource_description(raw_rd) - - sub_spec = _get_spec_submodule(r_rd.type, r_rd.format_version) - if r_rd.type == "model": - filter_kwargs = dict(weights_priority_order=weights_priority_order) - else: - filter_kwargs = {} - - r_rd = sub_spec.utils.filter_resource_description(r_rd, **filter_kwargs) - - content: Dict[str, Union[pathlib.PurePath, raw_nodes.URI]] = {} - r_rd = RawNodePackageTransformer(content, r_rd.root_path).transform(r_rd) - assert "rdf.yaml" not in content - return r_rd, content - - -def get_resource_package_content( - raw_rd: Union[raw_nodes.ResourceDescription, raw_nodes.URI, str, pathlib.Path], - *, - weights_priority_order: Optional[Sequence[str]] = None, # model only -) -> Dict[str, Union[str, pathlib.PurePath, raw_nodes.URI]]: - """ - Args: - raw_rd: raw resource description - # for model resources only: - weights_priority_order: If given only the first weights format present in the model is included. - If none of the prioritized weights formats is found all are included. - - Returns: - Package content of remote URIs, local file paths or text content keyed by file names. - """ - if yaml is None: - raise RuntimeError( - "'get_resource_package_content' requires yaml; note that 'get_resource_package_content_wo_rdf' may be used " - "without yaml" - ) - - r_rd, content = get_resource_package_content_wo_rdf(raw_rd, weights_priority_order=weights_priority_order) - return {**content, **{"rdf.yaml": serialize_raw_resource_description(r_rd)}} diff --git a/bioimageio/spec/model/__init__.py b/bioimageio/spec/model/__init__.py index 308c221ba..85b483e96 100644 --- a/bioimageio/spec/model/__init__.py +++ b/bioimageio/spec/model/__init__.py @@ -1,15 +1,20 @@ -from . import v0_3, v0_4 - -# autogen: start -from . 
import converters, raw_nodes, schema, utils
-from .raw_nodes import FormatVersion
-
-try:
-    from typing import get_args
-except ImportError:
-    from typing_extensions import get_args  # type: ignore
-
-
-format_version = get_args(FormatVersion)[-1]
-
-# autogen: stop
+# autogen: start
+"""
+implementations of all released minor versions are available in submodules:
+- model v0_4: `bioimageio.spec.model.v0_4.ModelDescr` [user documentation](../../../user_docs/model_descr_v0-4.md)
+- model v0_5: `bioimageio.spec.model.v0_5.ModelDescr` [user documentation](../../../user_docs/model_descr_v0-5.md)
+"""
+from typing import Union
+
+from pydantic import Discriminator
+from typing_extensions import Annotated
+
+from .v0_4 import ModelDescr as ModelDescr_v0_4
+from .v0_5 import ModelDescr as ModelDescr
+from .v0_5 import ModelDescr as ModelDescr_v0_5
+
+AnyModelDescr = Annotated[
+    Union[ModelDescr_v0_4, ModelDescr_v0_5], Discriminator("format_version")
+]
+"""Union of any released model description"""
+# autogen: stop
diff --git a/bioimageio/spec/model/_v0_3_converter.py b/bioimageio/spec/model/_v0_3_converter.py
new file mode 100644
index 000000000..97ed677fa
--- /dev/null
+++ b/bioimageio/spec/model/_v0_3_converter.py
@@ -0,0 +1,117 @@
+# type: ignore
+from typing import Any, Dict
+
+
+def convert_model_from_v0_3_to_0_4_0(data: Dict[str, Any]) -> None:
+    """auto converts model 'data' to newest format"""
+
+    if "format_version" not in data:
+        return
+
+    if data["format_version"] == "0.3.0":
+        # no breaking change, bump to 0.3.1
+        data["format_version"] = "0.3.1"
+
+    if data["format_version"] == "0.3.1":
+        data = _convert_model_v0_3_1_to_v0_3_2(data)
+
+    if data["format_version"] == "0.3.2":
+        data = _convert_model_v0_3_2_to_v0_3_3(data)
+
+    if data["format_version"] in ("0.3.3", "0.3.4", "0.3.5"):
+        data["format_version"] = "0.3.6"
+
+    if data["format_version"] != "0.3.6":
+        return
+
+    # remove 'future' from config if no other than the used future entries exist
+    config = data.get("config", {})
+    if config.get("future") == {}:
+        del config["future"]
+
+    # remove 'config' if now empty
+    if data.get("config") == {}:
+        del data["config"]
+
+    data.pop("language", None)
+    data.pop("framework", None)
+
+    architecture = data.pop("source", None)
+    architecture_sha256 = data.pop("sha256", None)
+    kwargs = data.pop("kwargs", None)
+    pytorch_state_dict_weights_entry = data.get("weights", {}).get("pytorch_state_dict")
+    if pytorch_state_dict_weights_entry is not None:
+        if architecture is not None:
+            pytorch_state_dict_weights_entry["architecture"] = architecture
+
+        if architecture_sha256 is not None:
+            pytorch_state_dict_weights_entry["architecture_sha256"] = (
+                architecture_sha256
+            )
+
+        if kwargs is not None:
+            pytorch_state_dict_weights_entry["kwargs"] = kwargs
+
+    torchscript_weights_entry = data.get("weights", {}).pop("pytorch_script", None)
+    if torchscript_weights_entry is not None:
+        data.setdefault("weights", {})["torchscript"] = torchscript_weights_entry
+
+    data["format_version"] = "0.4.0"
+
+
+def _convert_model_v0_3_1_to_v0_3_2(data: Dict[str, Any]) -> Dict[str, Any]:
+    data["type"] = "model"
+    data["format_version"] = "0.3.2"
+    future = data.get("config", {}).get("future", {}).pop("0.3.2", {})
+
+    authors = data.get("authors")
+    if isinstance(authors, list):
+        data["authors"] = [{"name": name} for name in authors]
+        authors_update = future.get("authors")
+        if authors_update is not None:
+            for a, u in zip(data["authors"], authors_update):
+                a.update(u)
+
+    # packaged_by
+    packaged_by = 
data.get("packaged_by") + if packaged_by is not None: + data["packaged_by"] = [{"name": name} for name in data["packaged_by"]] + packaged_by_update = future.get("packaged_by") + if packaged_by_update is not None: + for a, u in zip(data["packaged_by"], packaged_by_update): + a.update(u) + + # authors of weights + weights = data.get("weights") + if isinstance(weights, dict): + for weights_format, weights_entry in weights.items(): + if "authors" not in weights_entry: + continue + + weights_entry["authors"] = [ + {"name": name} for name in weights_entry["authors"] + ] + authors_update = ( + future.get("weights", {}).get(weights_format, {}).get("authors") + ) + if authors_update is not None: + for a, u in zip(weights_entry["authors"], authors_update): + a.update(u) + + # model version + if "version" in future: + data["version"] = future.pop("version") + + return data + + +def _convert_model_v0_3_2_to_v0_3_3(data: Dict[str, Any]) -> Dict[str, Any]: + data["format_version"] = "0.3.3" + if "outputs" in data: + for out in data["outputs"]: + if "shape" in out: + shape = out["shape"] + if isinstance(shape, dict) and "reference_input" in shape: + shape["reference_tensor"] = shape.pop("reference_input") + + return data diff --git a/bioimageio/spec/model/_v0_4_converter.py b/bioimageio/spec/model/_v0_4_converter.py new file mode 100644 index 000000000..d4f46c3be --- /dev/null +++ b/bioimageio/spec/model/_v0_4_converter.py @@ -0,0 +1,77 @@ +import collections.abc + +from .._internal.io import BioimageioYamlContent +from ..generic._v0_2_converter import ( + remove_doi_prefix, + remove_gh_prefix, + remove_slashes_from_names, +) +from ._v0_3_converter import convert_model_from_v0_3_to_0_4_0 + + +def convert_from_older_format(data: BioimageioYamlContent) -> None: + fv = data.get("format_version") + if not isinstance(fv, str): + return + + major_minor = tuple(map(int, fv.split(".")[:2])) + if major_minor < (0, 4): + convert_model_from_v0_3_to_0_4_0(data) + elif major_minor > (0, 4): + return + + if data["format_version"] == "0.4.0": + _convert_model_from_v0_4_0_to_0_4_1(data) + + if data["format_version"] in ("0.4.1", "0.4.2", "0.4.3", "0.4.4"): + _convert_model_from_v0_4_4_to_0_4_5(data) + + if data["format_version"] in ("0.4.5", "0.4.6"): + remove_slashes_from_names(data) + data["format_version"] = "0.4.7" + + if data["format_version"] in ("0.4.7", "0.4.8"): + data["format_version"] = "0.4.9" + + if data["format_version"] == "0.4.9": + if isinstance(config := data.get("config"), dict) and isinstance( + bconfig := config.get("bioimageio"), dict + ): + if (nickname := bconfig.get("nickname")) is not None: + data["id"] = nickname + + if (nickname_icon := bconfig.get("nickname_icon")) is not None: + data["id_emoji"] = nickname_icon + + data["format_version"] = "0.4.10" + + remove_doi_prefix(data) + remove_gh_prefix(data) + # remove 'future' from config if no other than the used future entries exist + config = data.get("config", {}) + if isinstance(config, dict) and config.get("future") == {}: + del config["future"] + + # remove 'config' if now empty + if data.get("config") == {}: + del data["config"] + + +def _convert_model_from_v0_4_0_to_0_4_1(data: BioimageioYamlContent): + # move dependencies from root to pytorch_state_dict weights entry + deps = data.pop("dependencies", None) + weights = data.get("weights", {}) + if deps and weights and isinstance(weights, dict): + entry = weights.get("pytorch_state_dict") + if entry and isinstance(entry, dict): + entry["dependencies"] = deps + + data["format_version"] = 
"0.4.1" + + +def _convert_model_from_v0_4_4_to_0_4_5(data: BioimageioYamlContent) -> None: + parent = data.pop("parent", None) + if isinstance(parent, collections.abc.Mapping) and "uri" in parent: + data["parent"] = parent["uri"] + + data["format_version"] = "0.4.5" diff --git a/bioimageio/spec/model/converters.py b/bioimageio/spec/model/converters.py deleted file mode 100644 index 13e4e8557..000000000 --- a/bioimageio/spec/model/converters.py +++ /dev/null @@ -1,3 +0,0 @@ -# Auto-generated by generate_passthrough_modules.py - do not modify - -from .v0_4.converters import * diff --git a/bioimageio/spec/model/raw_nodes.py b/bioimageio/spec/model/raw_nodes.py deleted file mode 100644 index 31ec034c3..000000000 --- a/bioimageio/spec/model/raw_nodes.py +++ /dev/null @@ -1,3 +0,0 @@ -# Auto-generated by generate_passthrough_modules.py - do not modify - -from .v0_4.raw_nodes import * diff --git a/bioimageio/spec/model/schema.py b/bioimageio/spec/model/schema.py deleted file mode 100644 index ef76dc15a..000000000 --- a/bioimageio/spec/model/schema.py +++ /dev/null @@ -1,3 +0,0 @@ -# Auto-generated by generate_passthrough_modules.py - do not modify - -from .v0_4.schema import * diff --git a/bioimageio/spec/model/utils.py b/bioimageio/spec/model/utils.py deleted file mode 100644 index 214dadc3b..000000000 --- a/bioimageio/spec/model/utils.py +++ /dev/null @@ -1,3 +0,0 @@ -# Auto-generated by generate_passthrough_modules.py - do not modify - -from .v0_4.utils import * diff --git a/bioimageio/spec/model/v0_3/__init__.py b/bioimageio/spec/model/v0_3/__init__.py deleted file mode 100644 index 71ad3da34..000000000 --- a/bioimageio/spec/model/v0_3/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -from . import converters, raw_nodes, schema, utils -from .raw_nodes import FormatVersion - -try: - from typing import get_args -except ImportError: - from typing_extensions import get_args # type: ignore - - -format_version = get_args(FormatVersion)[-1] diff --git a/bioimageio/spec/model/v0_3/converters.py b/bioimageio/spec/model/v0_3/converters.py deleted file mode 100644 index 0c7c26385..000000000 --- a/bioimageio/spec/model/v0_3/converters.py +++ /dev/null @@ -1,120 +0,0 @@ -import copy -import pathlib -from typing import Any, Dict, Union - -from marshmallow import Schema - -from . 
import raw_nodes, schema - -AUTO_CONVERTED_DOCUMENTATION_FILE_NAME = "auto_converted_documentation.md" - - -def convert_model_v0_3_1_to_v0_3_2(data: Dict[str, Any]) -> Dict[str, Any]: - data["type"] = "model" - data["format_version"] = "0.3.2" - future = data.get("config", {}).get("future", {}).pop("0.3.2", {}) - - authors = data.get("authors") - if isinstance(authors, list): - data["authors"] = [{"name": name} for name in authors] - authors_update = future.get("authors") - if authors_update is not None: - for a, u in zip(data["authors"], authors_update): - a.update(u) - - # packaged_by - packaged_by = data.get("packaged_by") - if packaged_by is not None: - data["packaged_by"] = [{"name": name} for name in data["packaged_by"]] - packaged_by_update = future.get("packaged_by") - if packaged_by_update is not None: - for a, u in zip(data["packaged_by"], packaged_by_update): - a.update(u) - - # authors of weights - weights = data.get("weights") - if isinstance(weights, dict): - for weights_format, weights_entry in weights.items(): - if "authors" not in weights_entry: - continue - - weights_entry["authors"] = [{"name": name} for name in weights_entry["authors"]] - authors_update = future.get("weights", {}).get(weights_format, {}).get("authors") - if authors_update is not None: - for a, u in zip(weights_entry["authors"], authors_update): - a.update(u) - - # documentation: we now enforce `documentation` to be a local md file - if "documentation" in data: - - class DocSchema(Schema): - doc = schema.Model().fields["documentation"] - - doc_errors = DocSchema().validate({"doc": data["documentation"]}) - if doc_errors: - # data["documentation"] is not a local relative md file, so we replace it with a placeholder. - # Having access only to the raw data dict, we cannot write the AUTO_CONVERTED_DOCUMENTATION_FILE_NAME file, but - # save the original content of data["documentation"] in data["config"][AUTO_CONVERTED_DOCUMENTATION_FILE_NAME] - # to be written to AUTO_CONVERTED_DOCUMENTATION_FILE_NAME at a later stage. 
- data["config"] = data.get("config", {}) # make sure config exists - if AUTO_CONVERTED_DOCUMENTATION_FILE_NAME not in data["config"]: - orig_doc = data["documentation"] - assert isinstance(orig_doc, str) - if orig_doc.startswith("http"): - if orig_doc.endswith(".md"): - doc: Union[raw_nodes.URI, str, pathlib.Path] = raw_nodes.URI(orig_doc) - else: - doc = f"Find documentation at {orig_doc}" - else: - doc = pathlib.Path(orig_doc) - - data["config"][AUTO_CONVERTED_DOCUMENTATION_FILE_NAME] = doc - data["documentation"] = AUTO_CONVERTED_DOCUMENTATION_FILE_NAME - - # model version - if "version" in future: - data["version"] = future.pop("version") - - return data - - -def convert_model_v0_3_2_to_v0_3_3(data: Dict[str, Any]) -> Dict[str, Any]: - data["format_version"] = "0.3.3" - if "outputs" in data: - for out in data["outputs"]: - if "shape" in out: - shape = out["shape"] - if isinstance(shape, dict) and "reference_input" in shape: - shape["reference_tensor"] = shape.pop("reference_input") - - return data - - -def maybe_convert(data: Dict[str, Any]) -> Dict[str, Any]: - """auto converts model 'data' to newest format""" - - data = copy.deepcopy(data) - - if data.get("format_version", "0.3.0") == "0.3.0": - # no breaking change, bump to 0.3.1 - data["format_version"] = "0.3.1" - - if data["format_version"] == "0.3.1": - data = convert_model_v0_3_1_to_v0_3_2(data) - - if data["format_version"] == "0.3.2": - data = convert_model_v0_3_2_to_v0_3_3(data) - - if data["format_version"] in ("0.3.3", "0.3.4", "0.3.5"): - data["format_version"] = "0.3.6" - - # remove 'future' from config if no other than the used future entries exist - config = data.get("config", {}) - if config.get("future") == {}: - del config["future"] - - # remove 'config' if now empty - if data.get("config") == {}: - del data["config"] - - return data diff --git a/bioimageio/spec/model/v0_3/raw_nodes.py b/bioimageio/spec/model/v0_3/raw_nodes.py deleted file mode 100644 index a1aeec9cd..000000000 --- a/bioimageio/spec/model/v0_3/raw_nodes.py +++ /dev/null @@ -1,174 +0,0 @@ -import packaging.version -from dataclasses import dataclass -from datetime import datetime -from pathlib import Path -from typing import Any, ClassVar, Dict, List, Tuple, Union - -from marshmallow import missing -from marshmallow.utils import _Missing - -from bioimageio.spec.rdf.v0_2.raw_nodes import Author, Maintainer, RDF_Base -from bioimageio.spec.shared.raw_nodes import ( - Dependencies, - ImplicitOutputShape, - ImportableModule, - ImportableSourceFile, - ParametrizedInputShape, - RawNode, - URI, -) - -try: - from typing import Literal -except ImportError: - from typing_extensions import Literal # type: ignore - -# reassign to use imported classes -Maintainer = Maintainer - -FormatVersion = Literal[ - "0.3.0", "0.3.1", "0.3.2", "0.3.3", "0.3.4", "0.3.5", "0.3.6" -] # newest format needs to be last (used in __init__.py) -Framework = Literal["pytorch", "tensorflow"] -Language = Literal["python", "java"] -PostprocessingName = Literal[ - "binarize", "clip", "scale_linear", "sigmoid", "zero_mean_unit_variance", "scale_range", "scale_mean_variance" -] -PreprocessingName = Literal["binarize", "clip", "scale_linear", "sigmoid", "zero_mean_unit_variance", "scale_range"] -WeightsFormat = Literal[ - "pytorch_state_dict", "pytorch_script", "keras_hdf5", "tensorflow_js", "tensorflow_saved_model_bundle", "onnx" -] - - -@dataclass -class RunMode(RawNode): - name: str = missing - kwargs: Union[_Missing, Dict[str, Any]] = missing - - -@dataclass -class Preprocessing(RawNode): 
- name: PreprocessingName = missing - kwargs: Union[_Missing, Dict[str, Any]] = missing - - -@dataclass -class Postprocessing(RawNode): - name: PostprocessingName = missing - kwargs: Union[_Missing, Dict[str, Any]] = missing - - -@dataclass -class InputTensor(RawNode): - name: str = missing - data_type: str = missing - axes: str = missing - shape: Union[List[int], ParametrizedInputShape] = missing - preprocessing: Union[_Missing, List[Preprocessing]] = missing - description: Union[_Missing, str] = missing - data_range: Union[_Missing, Tuple[float, float]] = missing - - -@dataclass -class OutputTensor(RawNode): - name: str = missing - data_type: str = missing - axes: str = missing - shape: Union[List[int], ImplicitOutputShape] = missing - halo: Union[_Missing, List[int]] = missing - postprocessing: Union[_Missing, List[Postprocessing]] = missing - description: Union[_Missing, str] = missing - data_range: Union[_Missing, Tuple[float, float]] = missing - - -@dataclass -class _WeightsEntryBase(RawNode): - _include_in_package = ("source",) - weights_format_name: ClassVar[str] # human readable - authors: Union[_Missing, List[Author]] = missing - attachments: Union[_Missing, Dict] = missing - parent: Union[_Missing, str] = missing - sha256: Union[_Missing, str] = missing - source: Union[URI, Path] = missing - - -@dataclass -class KerasHdf5WeightsEntry(_WeightsEntryBase): - weights_format_name = "Keras HDF5" - tensorflow_version: Union[_Missing, packaging.version.Version] = missing - - -@dataclass -class OnnxWeightsEntry(_WeightsEntryBase): - weights_format_name = "ONNX" - opset_version: Union[_Missing, int] = missing - - -@dataclass -class PytorchStateDictWeightsEntry(_WeightsEntryBase): - weights_format_name = "Pytorch State Dict" - - -@dataclass -class PytorchScriptWeightsEntry(_WeightsEntryBase): - weights_format_name = "TorchScript" - - -@dataclass -class TensorflowJsWeightsEntry(_WeightsEntryBase): - weights_format_name = "Tensorflow.js" - tensorflow_version: Union[_Missing, packaging.version.Version] = missing - - -@dataclass -class TensorflowSavedModelBundleWeightsEntry(_WeightsEntryBase): - weights_format_name = "Tensorflow Saved Model" - tensorflow_version: Union[_Missing, packaging.version.Version] = missing - # tag: Union[_Missing, str] = missing # todo: do we need the tag?? 
- - -WeightsEntry = Union[ - KerasHdf5WeightsEntry, - OnnxWeightsEntry, - PytorchScriptWeightsEntry, - PytorchStateDictWeightsEntry, - TensorflowJsWeightsEntry, - TensorflowSavedModelBundleWeightsEntry, -] - -ImportableSource = Union[ImportableSourceFile, ImportableModule] - - -@dataclass -class ModelParent(RawNode): - uri: Union[URI, Path] = missing - sha256: str = missing - - -@dataclass -class Model(RDF_Base): - _include_in_package = ("covers", "documentation", "test_inputs", "test_outputs") - - authors: List[Author] = missing # type: ignore # base RDF has List[Union[Author, str]], but should change soon - dependencies: Union[_Missing, Dependencies] = missing - format_version: FormatVersion = missing - framework: Union[_Missing, Framework] = missing - inputs: List[InputTensor] = missing - kwargs: Union[_Missing, Dict[str, Any]] = missing - language: Union[_Missing, Language] = missing - license: str = missing - links: Union[_Missing, List[str]] = missing - outputs: List[OutputTensor] = missing - packaged_by: Union[_Missing, List[Author]] = missing - parent: Union[_Missing, ModelParent] = missing - run_mode: Union[_Missing, RunMode] = missing - sample_inputs: Union[_Missing, List[Union[URI, Path]]] = missing - sample_outputs: Union[_Missing, List[Union[URI, Path]]] = missing - sha256: Union[_Missing, str] = missing - timestamp: datetime = missing - type: Literal["model"] = missing - - source: Union[_Missing, ImportableSource] = missing - test_inputs: List[Union[URI, Path]] = missing - test_outputs: List[Union[URI, Path]] = missing - weights: Dict[WeightsFormat, WeightsEntry] = missing diff --git a/bioimageio/spec/model/v0_3/schema.py b/bioimageio/spec/model/v0_3/schema.py deleted file mode 100644 index bb982c8bf..000000000 --- a/bioimageio/spec/model/v0_3/schema.py +++ /dev/null @@ -1,790 +0,0 @@ -import typing -import warnings -from copy import deepcopy -from types import ModuleType - -from marshmallow import ( - RAISE, - ValidationError, - post_load, - pre_dump, - pre_load, - validates_schema, -) -from marshmallow import missing as missing_ - -from bioimageio.spec.rdf import v0_2 as rdf -from bioimageio.spec.shared import field_validators, fields -from bioimageio.spec.shared.common import ValidationWarning, get_args, get_args_flat -from bioimageio.spec.shared.schema import ( - ImplicitOutputShape, - ParametrizedInputShape, - SharedBioImageIOSchema, - SharedProcessingSchema, -) -from bioimageio.spec.shared.utils import get_ref_url - -from . import raw_nodes - -Author = rdf.schema.Author -CiteEntry = rdf.schema.CiteEntry - - -class _BioImageIOSchema(SharedBioImageIOSchema): - raw_nodes: typing.ClassVar[ModuleType] = raw_nodes - - -class RunMode(_BioImageIOSchema): - name = fields.String( - required=True, bioimageio_description="The name of the `run_mode`" - ) # todo: limit valid run mode names - kwargs = fields.Kwargs() - - -class _TensorBase(_BioImageIOSchema): - name = fields.String( - required=True, validate=field_validators.Predicate("isidentifier"), bioimageio_description="Tensor name." - ) - description = fields.String() - axes = fields.Axes( - required=True, - bioimageio_description="""Axes identifying characters from: bitczyx. Same length and order as the axes in `shape`. 
- -| character | description | -| --- | --- | -| b | batch (groups multiple samples) | -| i | instance/index/element | -| t | time | -| c | channel | -| z | spatial dimension z | -| y | spatial dimension y | -| x | spatial dimension x |""", - ) - data_type = fields.String( - required=True, - bioimageio_description="The data type of this tensor. For inputs, only `float32` is allowed and the consumer " - "software needs to ensure that the correct data type is passed here. For outputs can be any of `float32, " - "float64, (u)int8, (u)int16, (u)int32, (u)int64`. The data flow in bioimage.io models is explained " - "[in this diagram.](https://docs.google.com/drawings/d/1FTw8-Rn6a6nXdkZ_SkMumtcjvur9mtIhRqLwnKqZNHM/edit).", - ) - data_range = fields.Tuple( - (fields.Float(allow_nan=True), fields.Float(allow_nan=True)), - bioimageio_description="Tuple `(minimum, maximum)` specifying the allowed range of the data in this tensor. " - "If not specified, the full data range that can be expressed in `data_type` is allowed.", - ) - shape: fields.Union - - processing_name: str - - @validates_schema - def validate_processing_kwargs(self, data, **kwargs): - axes = data["axes"] - processing_list = data.get(self.processing_name, []) - for processing in processing_list: - kwargs = processing.kwargs or {} - kwarg_axes = kwargs.get("axes", "") - if any(a not in axes for a in kwarg_axes): - raise ValidationError("`kwargs.axes` needs to be subset of axes") - - -class Processing(_BioImageIOSchema): - class binarize(SharedProcessingSchema): - bioimageio_description = ( - "Binarize the tensor with a fixed threshold, values above the threshold will be set to one, values below " - "the threshold to zero." - ) - threshold = fields.Float(required=True, bioimageio_description="The fixed threshold") - - class clip(SharedProcessingSchema): - bioimageio_description = "Set tensor values below min to min and above max to max." - min = fields.Float(required=True, bioimageio_description="minimum value for clipping") - max = fields.Float(required=True, bioimageio_description="maximum value for clipping") - - class scale_linear(SharedProcessingSchema): - bioimageio_description = "Fixed linear scaling." - axes = fields.Axes( - valid_axes="czyx", - bioimageio_description="The subset of axes to scale jointly. " - "For example xy to scale the two image axes for 2d data jointly. 
" - "The batch axis (b) is not valid here.", - ) - gain = fields.Array( - fields.Float(), - bioimageio_maybe_required=True, - missing=fields.Float(missing=1.0), - bioimageio_description="multiplicative factor", - ) # todo: check if gain match input axes - offset = fields.Array( - fields.Float(), - bioimageio_maybe_required=True, - missing=fields.Float(missing=0.0), - bioimageio_description="additive term", - ) # todo: check if offset match input axes - - @validates_schema - def either_gain_or_offset(self, data, **kwargs): - gain = data["gain"] - if isinstance(gain, (float, int)): - gain = [gain] - - offset = data["offset"] - if isinstance(offset, (float, int)): - offset = [offset] - - if all(g == 1.0 for g in gain) and all(off == 0 for off in offset): - raise ValidationError("Specify gain!=1.0 or offset!=0.0") - - @validates_schema - def kwargs_match_selected_preprocessing_name(self, data, **kwargs): - schema_name = data["name"] - - try: - schema_class = getattr(self, schema_name) - except AttributeError as missing_schema_error: - raise NotImplementedError( - f"Schema {schema_name} for {data['name']} {self.__class__.__name__.lower()}" - ) from missing_schema_error - - kwargs_validation_errors = schema_class().validate(data.get("kwargs", {})) - if kwargs_validation_errors: - raise ValidationError(f"Invalid `kwargs` for '{data['name']}': {kwargs_validation_errors}") - - class sigmoid(SharedProcessingSchema): - bioimageio_description = "" - - class zero_mean_unit_variance(SharedProcessingSchema): - bioimageio_description = "Subtract mean and divide by variance." - mode = fields.ProcMode() - axes = fields.Axes( - valid_axes="czyx", - bioimageio_description="The subset of axes to normalize jointly. For example xy to normalize the two image " - "axes for 2d data jointly. The batch axis (b) is not valid here.", - ) - mean = fields.Array( - fields.Float(), - bioimageio_description="The mean value(s) to use for `mode == fixed`. For example `[1.1, 2.2, 3.3]` in the " - "case of a 3 channel image where the channels are not normalized jointly.", - ) # todo: check if means match input axes (for mode 'fixed') - std = fields.Array( - fields.Float(), - bioimageio_description="The standard deviation values to use for `mode == fixed`. Analogous to mean.", - ) - eps = fields.Float( - missing=1e-6, - bioimageio_description="epsilon for numeric stability: `out = (tensor - mean) / (std + eps)`. " - "Default value: 10^-6.", - ) - - @validates_schema - def mean_and_std_match_mode(self, data, **kwargs): - if data.get("mode", "fixed") == "fixed" and ("mean" not in data or "std" not in data): - raise ValidationError( - "`kwargs` for 'zero_mean_unit_variance' preprocessing with `mode` 'fixed' require additional " - "`kwargs`: `mean` and `std`." - ) - elif data.get("mode", "fixed") != "fixed" and ("mean" in data or "std" in data): - raise ValidationError( - "`kwargs`: `mean` and `std` for 'zero_mean_unit_variance' preprocessing are only valid for `mode` 'fixed'." - ) - - -class Preprocessing(Processing): - name = fields.String( - required=True, - validate=field_validators.OneOf(get_args(raw_nodes.PreprocessingName)), - bioimageio_description=f"Name of preprocessing. One of: {', '.join(get_args(raw_nodes.PreprocessingName))}.", - ) - kwargs = fields.Kwargs( - bioimageio_description=f"Key word arguments as described in [preprocessing spec]" - f"(https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/preprocessing_spec_" - f"{'_'.join(get_args(raw_nodes.FormatVersion)[-1].split('.')[:2])}.md)." 
- ) - - class scale_range(SharedProcessingSchema): - bioimageio_description = "Scale with percentiles." - mode = fields.ProcMode(valid_modes=("per_dataset", "per_sample")) - axes = fields.Axes( - valid_axes="czyx", - bioimageio_description="The subset of axes to normalize jointly. For example xy to normalize the two image " - "axes for 2d data jointly. The batch axis (b) is not valid here.", - ) - min_percentile = fields.Float( - default=0, - validate=field_validators.Range(0, 100, min_inclusive=True, max_inclusive=False), - bioimageio_description="The lower percentile used for normalization, in range 0 to 100. Default value: 0.", - ) - max_percentile = fields.Float( - default=100, - validate=field_validators.Range(1, 100, min_inclusive=False, max_inclusive=True), - bioimageio_description="The upper percentile used for normalization, in range 1 to 100. Has to be bigger " - "than min_percentile. Default value: 100. The range is 1 to 100 instead of 0 to 100 to avoid mistakenly " - "accepting percentiles specified in the range 0.0 to 1.0.", - ) - eps = fields.Float( - missing=1e-6, - bioimageio_description="Epsilon for numeric stability: " - "`out = (tensor - v_lower) / (v_upper - v_lower + eps)`; " - "with `v_lower,v_upper` values at the respective percentiles. Default value: 10^-6.", - ) - - @validates_schema - def min_smaller_max(self, data, **kwargs): - min_p = data.get("min_percentile", 0) - max_p = data.get("max_percentile", 100) - if min_p >= max_p: - raise ValidationError(f"min_percentile {min_p} >= max_percentile {max_p}") - - -class Postprocessing(Processing): - name = fields.String( - validate=field_validators.OneOf(get_args(raw_nodes.PostprocessingName)), - required=True, - bioimageio_description=f"Name of postprocessing. One of: {', '.join(get_args(raw_nodes.PostprocessingName))}.", - ) - kwargs = fields.Kwargs( - bioimageio_description=f"Key word arguments as described in [postprocessing spec]" - f"(https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/postprocessing_spec_" - f"{'_'.join(get_args(raw_nodes.FormatVersion)[-1].split('.')[:2])}.md)." - ) - - class scale_range(Preprocessing.scale_range): - reference_tensor = fields.String( - required=False, - validate=field_validators.Predicate("isidentifier"), - bioimageio_description="Tensor name to compute the percentiles from. Default: The tensor itself. " - "If mode==per_dataset this needs to be the name of an input tensor.", - ) - - class scale_mean_variance(SharedProcessingSchema): - bioimageio_description = "Scale the tensor s.t. its mean and variance match a reference tensor." - mode = fields.ProcMode(valid_modes=("per_dataset", "per_sample")) - reference_tensor = fields.String( - required=True, - validate=field_validators.Predicate("isidentifier"), - bioimageio_description="Name of tensor to match.", - ) - axes = fields.Axes( - valid_axes="czyx", - bioimageio_description="The subset of axes to scale jointly. For example xy to normalize the two image " - "axes for 2d data jointly. The batch axis (b) is not valid here. " - "Default: scale all non-batch axes jointly.", - ) - eps = fields.Float( - missing=1e-6, - bioimageio_description="Epsilon for numeric stability: " - "`out = (tensor - mean) / (std + eps) * (ref_std + eps) + ref_mean. " - "Default value: 10^-6.", - ) - - -class InputTensor(_TensorBase): - shape = fields.Union( - [ - fields.ExplicitShape( - bioimageio_description="Exact shape with same length as `axes`, e.g. 
`shape: [1, 512, 512, 1]`" - ), - fields.Nested( - ParametrizedInputShape(), - bioimageio_description="A sequence of valid shapes given by `shape = min + k * step for k in {0, 1, ...}`.", - ), - ], - required=True, - bioimageio_description="Specification of input tensor shape.", - ) - preprocessing = fields.List( - fields.Nested(Preprocessing()), bioimageio_description="Description of how this input should be preprocessed." - ) - processing_name = "preprocessing" - - @validates_schema - def zero_batch_step_and_one_batch_size(self, data, **kwargs): - axes = data["axes"] - shape = data["shape"] - - bidx = axes.find("b") - if bidx == -1: - return - - if isinstance(shape, raw_nodes.ParametrizedInputShape): - step = shape.step - shape = shape.min - - elif isinstance(shape, list): - step = [0] * len(shape) - else: - raise ValidationError(f"Unknown shape type {type(shape)}") - - if step[bidx] != 0: - raise ValidationError( - "Input shape step has to be zero in the batch dimension (the batch dimension can always be " - "increased, but `step` should specify how to increase the minimal shape to find the largest " - "single batch shape)" - ) - - if shape[bidx] != 1: - raise ValidationError("Input shape has to be 1 in the batch dimension b.") - - -class OutputTensor(_TensorBase): - shape = fields.Union( - [ - fields.ExplicitShape(), - fields.Nested( - ImplicitOutputShape(), - bioimageio_description="In reference to the shape of an input tensor, the shape of the output " - "tensor is `shape = shape(input_tensor) * scale + 2 * offset`.", - ), - ], - required=True, - bioimageio_description="Specification of output tensor shape.", - ) - halo = fields.List( - fields.Integer(), - bioimageio_description=lambda: "Hint to describe the potentially corrupted edge region of the output tensor, due to " - "boundary effects. " - "The `halo` is not cropped by the bioimage.io model, but is left to be cropped by the consumer software. " - f"An example implementation of prediction with tiling, accounting for the halo can be found [here](" - f"{get_ref_url('function', '_predict_with_tiling_impl', 'https://github.com/bioimage-io/core-bioimage-io-python/blob/main/bioimageio/core/prediction.py')}). " - "Use `shape:offset` if the model output itself is cropped and input and output shapes not fixed. ", - ) - postprocessing = fields.List( - fields.Nested(Postprocessing()), - bioimageio_description="Description of how this output should be postprocessed.", - ) - processing_name = "postprocessing" - - @validates_schema - def matching_halo_length(self, data, **kwargs): - shape = data["shape"] - halo = data.get("halo") - if halo is None: - return - elif isinstance(shape, list) or isinstance(shape, raw_nodes.ImplicitOutputShape): - if len(halo) != len(shape): - raise ValidationError(f"halo {halo} has to have same length as shape {shape}!") - else: - raise NotImplementedError(type(shape)) - - -_common_sha256_hint = ( - "You can drag and drop your file to this [online tool]" - "(http://emn178.github.io/online-tools/sha256_checksum.html) to generate it in your browser. " - "Or you can generate the SHA256 code for your model and weights by using for example, `hashlib` in Python. " - "[here is a codesnippet](https://gist.github.com/FynnBe/e64460463df89439cff218bbf59c1100)." -) - - -class _WeightsEntryBase(_BioImageIOSchema): - authors = fields.List( - fields.Nested(Author()), - bioimageio_description="A list of authors. If this is the root weight (it does not have a `parent` field): the " - "person(s) that have trained this model. 
If this is a child weight (it has a `parent` field): the person(s) " - "who have converted the weights to this format.", - ) # todo: copy root authors if missing - attachments = fields.Dict( - fields.String(), - fields.List(fields.Union([fields.URI(), fields.Raw()])), - bioimageio_description="Dictionary of text keys and list values (that may contain any valid yaml) to " - "additional, relevant files that are specific to the current weight format. A list of URIs can be listed under" - " the `files` key to included additional files for generating the model package.", - ) - parent = fields.String( - bioimageio_description="The source weights used as input for converting the weights to this format. For " - "example, if the weights were converted from the format `pytorch_state_dict` to `pytorch_script`, the parent " - "is `pytorch_state_dict`. All weight entries except one (the initial set of weights resulting from training " - "the model), need to have this field." - ) - sha256 = fields.String( - validate=field_validators.Length(equal=64), - bioimageio_description="SHA256 checksum of the source file specified. " + _common_sha256_hint, - ) - source = fields.Union( - [fields.URI(), fields.Path()], - required=True, - bioimageio_description="URI or path to the weights file. Preferably a url. For multi-file weights " - "(`tensorflow_saved_model_bundle`) this should be a zip archive with all required files/folders.", - ) - weights_format = fields.String( - validate=field_validators.OneOf(get_args(raw_nodes.WeightsFormat)), required=True, load_only=True - ) - - @post_load - def make_object(self, data, **kwargs): - data.pop("weights_format") # weights_format was only used to identify correct WeightsEntry schema - return super().make_object(data, **kwargs) - - @pre_dump - def raise_on_weights_format_mismatch(self, raw_node, **kwargs): - """ - ensures to serialize a raw_nodes.WeightsEntry with the corresponding schema.WeightsEntry - - This check is required, because no validation is performed by marshmallow on serialization, - which disables the Union field to select the appropriate nested schema for serialization. 
- """ - if self.__class__.__name__ != raw_node.__class__.__name__: - raise TypeError(f"Cannot serialize {raw_node} with {self}") - - return raw_node - - -class KerasHdf5WeightsEntry(_WeightsEntryBase): - bioimageio_description = "Keras HDF5 weights format" - weights_format = fields.String(validate=field_validators.Equal("keras_hdf5"), required=True, load_only=True) - tensorflow_version = fields.Version() - - -class OnnxWeightsEntry(_WeightsEntryBase): - bioimageio_description = "ONNX weights format" - weights_format = fields.String(validate=field_validators.Equal("onnx"), required=True, load_only=True) - opset_version = fields.Integer() - - -class PytorchStateDictWeightsEntry(_WeightsEntryBase): - bioimageio_description = "PyTorch state dictionary weights format" - weights_format = fields.String(validate=field_validators.Equal("pytorch_state_dict"), required=True, load_only=True) - - -class PytorchScriptWeightsEntry(_WeightsEntryBase): - bioimageio_description = "Torch Script weights format" - weights_format = fields.String(validate=field_validators.Equal("pytorch_script"), required=True, load_only=True) - - -class TensorflowJsWeightsEntry(_WeightsEntryBase): - bioimageio_description = "Tensorflow Javascript weights format" - weights_format = fields.String(validate=field_validators.Equal("tensorflow_js"), required=True, load_only=True) - tensorflow_version = fields.Version() - - -class TensorflowSavedModelBundleWeightsEntry(_WeightsEntryBase): - bioimageio_description = "Tensorflow Saved Model Bundle weights format" - weights_format = fields.String( - validate=field_validators.Equal("tensorflow_saved_model_bundle"), required=True, load_only=True - ) - tensorflow_version = fields.Version() - - -WeightsEntry = typing.Union[ - PytorchStateDictWeightsEntry, - PytorchScriptWeightsEntry, - KerasHdf5WeightsEntry, - TensorflowJsWeightsEntry, - TensorflowSavedModelBundleWeightsEntry, - OnnxWeightsEntry, -] - - -class ModelParent(_BioImageIOSchema): - uri = fields.Union( # todo: allow URI or DOI instead (and not local path!?) - [fields.URI(), fields.Path()], - bioimageio_description="Url of another model available on bioimage.io or path to a local model in the " - "bioimage.io specification. If it is a url, it needs to be a github url linking to the page containing the " - "model (NOT the raw file).", - ) - sha256 = fields.SHA256(bioimageio_description="Hash of the parent model RDF.") - - -class Model(rdf.schema.RDF): - raw_nodes: typing.ClassVar[ModuleType] = raw_nodes - - class Meta: - unknown = RAISE - - bioimageio_description = f"""# BioImage.IO Model Resource Description File Specification {get_args(raw_nodes.FormatVersion)[-1]} -This specification defines the fields used in a BioImage.IO-compliant resource description file (`RDF`) for describing AI models with pretrained weights. -These fields are typically stored in YAML files which we called Model Resource Description Files or `model RDF`. -The model RDFs can be downloaded or uploaded to the bioimage.io website, produced or consumed by BioImage.IO-compatible consumers(e.g. image analysis software or other website). - -The model RDF YAML file contains mandatory and optional fields. In the following description, optional fields are indicated by _optional_. -_optional*_ with an asterisk indicates the field is optional depending on the value in another field. -""" - # todo: unify authors with RDF (optional or required?) 
- authors = fields.List( - fields.Nested(Author()), required=True, bioimageio_description=rdf.schema.RDF.authors_bioimageio_description - ) - - badges = missing_ - cite = fields.List( - fields.Nested(CiteEntry()), - required=True, # todo: unify authors with RDF (optional or required?) - bioimageio_description=rdf.schema.RDF.cite_bioimageio_description, - ) - - documentation = fields.Union( - [ - fields.URL(), - fields.Path(), - ], - required=True, - bioimageio_description="Relative path to file with additional documentation in markdown. This means: 1) only " - "relative file path is allowed 2) the file must be in markdown format with `.md` file name extension 3) URL is " - "not allowed. It is recommended to use `README.md` as the documentation name.", - ) - - download_url = missing_ - - dependencies = fields.Dependencies( # todo: add validation (0.4.0?) - bioimageio_description="Dependency manager and dependency file, specified as `:`. For example: 'conda:./environment.yaml', 'maven:./pom.xml', or 'pip:./requirements.txt'" - ) - - format_version = fields.String( - validate=field_validators.OneOf(get_args_flat(raw_nodes.FormatVersion)), - required=True, - bioimageio_description_order=0, - bioimageio_description=f"""Version of the BioImage.IO Model Resource Description File Specification used. -This is mandatory, and important for the consumer software to verify before parsing the fields. -The recommended behavior for the implementation is to keep backward compatibility and throw an error if the model yaml -is in an unsupported format version. The current format version described here is -{get_args(raw_nodes.FormatVersion)[-1]}""", - ) - - framework = fields.String( - validate=field_validators.OneOf(get_args(raw_nodes.Framework)), - bioimageio_description=f"The deep learning framework of the source code. One of: " - f"{', '.join(get_args(raw_nodes.Framework))}. This field is only required if the field `source` is present.", - ) - - git_repo = fields.String( - validate=field_validators.URL(schemes=["http", "https"]), - bioimageio_description=rdf.schema.RDF.git_repo_bioimageio_description - + "If the model is contained in a subfolder of a git repository, then a url to the exact folder" - + "(which contains the configuration yaml file) should be used.", - ) - - icon = missing_ - - kwargs = fields.Kwargs( - bioimageio_description="Keyword arguments for the implementation specified by `source`. " - "This field is only required if the field `source` is present." - ) - - language = fields.String( - validate=field_validators.OneOf(get_args(raw_nodes.Language)), - bioimageio_maybe_required=True, - bioimageio_description=f"Programming language of the source code. One of: " - f"{', '.join(get_args(raw_nodes.Language))}. This field is only required if the field `source` is present.", - ) - - license = fields.String( - required=True, # todo: unify license with RDF (optional or required?) - bioimageio_description=rdf.schema.RDF.license_bioimageio_description, - ) - - name = fields.String( - # validate=field_validators.Length(max=36), # todo: enforce in future version (0.4.0?) - required=True, - bioimageio_description="Name of this model. It should be human-readable and only contain letters, numbers, " - "underscore '_', minus '-' or spaces and not be longer than 36 characters.", - ) - - packaged_by = fields.List( - fields.Nested(Author()), - bioimageio_description=f"The persons that have packaged and uploaded this model. 
Only needs to be specified if " - f"different from `authors` in root or any entry in `weights`.", - ) - - parent = fields.Nested( - ModelParent(), - bioimageio_description="Parent model from which the trained weights of this model have been derived, e.g. by " - "finetuning the weights of this model on a different dataset. For format changes of the same trained model " - "checkpoint, see `weights`.", - ) - - run_mode = fields.Nested( - RunMode(), - bioimageio_description="Custom run mode for this model: for more complex prediction procedures like test time " - "data augmentation that currently cannot be expressed in the specification. " - "No standard run modes are defined yet.", - ) - - sha256 = fields.String( - validate=field_validators.Length(equal=64), - bioimageio_description="SHA256 checksum of the model source code file." - + _common_sha256_hint - + " This field is only required if the field source is present.", - ) - - source = fields.ImportableSource( - bioimageio_maybe_required=True, - bioimageio_description="Language and framework specific implementation. As some weights contain the model " - "architecture, the source is optional depending on the present weight formats. `source` can either point to a " - "local implementation: `:` or the " - "implementation in an available dependency: `..`.\nFor example: " - "`my_function.py:MyImplementation` or `core_library.some_module.some_function`.", - ) - - timestamp = fields.DateTime( - required=True, - bioimageio_description="Timestamp of the initial creation of this model in [ISO 8601]" - "(#https://en.wikipedia.org/wiki/ISO_8601) format.", - ) - - weights = fields.Dict( - fields.String( - validate=field_validators.OneOf(get_args(raw_nodes.WeightsFormat)), - required=True, - bioimageio_description=f"Format of this set of weights. Weight formats can define additional (optional or " - f"required) fields. See [weight_formats_spec_0_3.md]" - f"(https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/weight_formats_spec_0_3.md). " - f"One of: {', '.join(get_args(raw_nodes.WeightsFormat))}", - ), - fields.Union([fields.Nested(we()) for we in get_args(WeightsEntry)]), - required=True, - bioimageio_description="The weights for this model. Weights can be given for different formats, but should " - "otherwise be equivalent. The available weight formats determine which consumers can use this model.", - ) - - @pre_load - def add_weights_format_key_to_weights_entry_value(self, data: dict, many=False, partial=False, **kwargs): - data = deepcopy(data) # Schema.validate() calls pre_load methods, thus we should not modify the input data - if many or partial: - raise NotImplementedError - - for weights_format, weights_entry in data.get("weights", {}).items(): - if "weights_format" in weights_entry: - raise ValidationError(f"Got unexpected key 'weights_format' in weights entry {weights_format}") - - weights_entry["weights_format"] = weights_format - - return data - - inputs = fields.List( - fields.Nested(InputTensor()), bioimageio_description="Describes the input tensors expected by this model." - ) - outputs = fields.List( - fields.Nested(OutputTensor()), bioimageio_description="Describes the output tensors from this model." - ) - - test_inputs = fields.List( - fields.Union([fields.URI(), fields.Path()]), - required=True, - bioimageio_description="List of URIs or local relative paths to test inputs as described in inputs for " - "**a single test case**. " - "This means if your model has more than one input, you should provide one URI for each input. 
" - "Each test input should be a file with a ndarray in " - "[numpy.lib file format](https://numpy.org/doc/stable/reference/generated/numpy.lib.format.html#module-numpy.lib.format)." - "The extension must be '.npy'.", - ) - test_outputs = fields.List( - fields.Union([fields.URI(), fields.Path()]), - required=True, - bioimageio_description="Analog to to test_inputs.", - ) - - sample_inputs = fields.List( - fields.Union([fields.URI(), fields.Path()]), - bioimageio_description="List of URIs/local relative paths to sample inputs to illustrate possible inputs for " - "the model, for example stored as png or tif images. " - "The model is not tested with these sample files that serve to inform a human user about an example use case.", - ) - sample_outputs = fields.List( - fields.Union([fields.URI(), fields.Path()]), - bioimageio_description="List of URIs/local relative paths to sample outputs corresponding to the " - "`sample_inputs`.", - ) - - config = fields.YamlDict( - bioimageio_description=rdf.schema.RDF.config_bioimageio_description - + """ - - For example: - ```yaml - config: - # custom config for DeepImageJ, see https://github.com/bioimage-io/configuration/issues/23 - deepimagej: - model_keys: - # In principle the tag "SERVING" is used in almost every tf model - model_tag: tf.saved_model.tag_constants.SERVING - # Signature definition to call the model. Again "SERVING" is the most general - signature_definition: tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY - test_information: - input_size: [2048x2048] # Size of the input images - output_size: [1264x1264 ]# Size of all the outputs - device: cpu # Device used. In principle either cpu or GPU - memory_peak: 257.7 Mb # Maximum memory consumed by the model in the device - runtime: 78.8s # Time it took to run the model - pixel_size: [9.658E-4ยตmx9.658E-4ยตm] # Size of the pixels of the input - ``` -""" - ) - - @validates_schema - def language_and_framework_match(self, data, **kwargs): - field_names = ("language", "framework") - valid_combinations = [ - ("python", "scikit-learn"), # todo: remove - ("python", "pytorch"), - ("python", "tensorflow"), - ("java", "tensorflow"), - ] - if "source" not in data: - valid_combinations.append((missing_, missing_)) - valid_combinations.append(("python", missing_)) - valid_combinations.append(("java", missing_)) - - combination = tuple(data.get(name, missing_) for name in field_names) - if combination not in valid_combinations: - raise ValidationError(f"invalid combination of {dict(zip(field_names, combination))}") - - @validates_schema - def source_specified_if_required(self, data, **kwargs): - if "source" in data: - return - - weights_format_requires_source = { - "pytorch_state_dict": True, - "pytorch_script": False, - "keras_hdf5": False, - "tensorflow_js": False, - "tensorflow_saved_model_bundle": False, - "onnx": False, - } - require_source = {wf for wf in data["weights"] if weights_format_requires_source[wf]} - if require_source: - raise ValidationError( - f"These specified weight formats require source code to be specified: {require_source}" - ) - - @validates_schema - def validate_reference_tensor_names(self, data, **kwargs): - valid_input_tensor_references = [ipt.name for ipt in data["inputs"]] - for out in data["outputs"]: - if out.postprocessing is missing_: - continue - - for postpr in out.postprocessing: - if postpr.kwargs is missing_: - continue - - ref_tensor = postpr.kwargs.get("reference_tensor", missing_) - if ref_tensor is not missing_ and ref_tensor not in 
valid_input_tensor_references: - raise ValidationError(f"{ref_tensor} not found in inputs") - - @validates_schema - def weights_entries_match_weights_formats(self, data, **kwargs) -> None: - weights: typing.Dict[str, _WeightsEntryBase] = data["weights"] - for weights_format, weights_entry in weights.items(): - if weights_format in ["keras_hdf5", "tensorflow_js", "tensorflow_saved_model_bundle"]: - assert isinstance( - weights_entry, - ( - raw_nodes.KerasHdf5WeightsEntry, - raw_nodes.TensorflowJsWeightsEntry, - raw_nodes.TensorflowSavedModelBundleWeightsEntry, - ), - ) - if weights_entry.tensorflow_version is missing_: - # todo: raise ValidationError (allow -> require)? - warnings.warn( - f"weights:{weights_format}: missing 'tensorflow_version' entry for weights format {weights_format}", - category=ValidationWarning, - ) - - if weights_format == "onnx": - assert isinstance(weights_entry, raw_nodes.OnnxWeightsEntry) - if weights_entry.opset_version is missing_: - # todo: raise ValidationError? - warnings.warn( - f"weights:{weights_format}: missing 'opset_version' entry for weights format {weights_format}", - category=ValidationWarning, - ) diff --git a/bioimageio/spec/model/v0_3/utils.py b/bioimageio/spec/model/v0_3/utils.py deleted file mode 100644 index 5bf5b820b..000000000 --- a/bioimageio/spec/model/v0_3/utils.py +++ /dev/null @@ -1,20 +0,0 @@ -from copy import deepcopy -from typing import Optional, Sequence - -from . import raw_nodes - - -def filter_resource_description( - raw_rd: raw_nodes.Model, weights_priority_order: Optional[Sequence[raw_nodes.WeightsFormat]] = None -) -> raw_nodes.Model: - # filter weights - raw_rd = deepcopy(raw_rd) - if weights_priority_order is not None: - for wfp in weights_priority_order: - if wfp in raw_rd.weights: - raw_rd.weights = {wfp: raw_rd.weights[wfp]} - break - else: - raise ValueError(f"Not found any of the specified weights formats {weights_priority_order}") - - return raw_rd diff --git a/bioimageio/spec/model/v0_4.py b/bioimageio/spec/model/v0_4.py new file mode 100644 index 000000000..e3a2f3af3 --- /dev/null +++ b/bioimageio/spec/model/v0_4.py @@ -0,0 +1,1129 @@ +from __future__ import annotations + +import collections.abc +from typing import ( + Any, + ClassVar, + Dict, + FrozenSet, + List, + Literal, + Optional, + Sequence, + Tuple, + Union, +) + +import numpy as np +from annotated_types import Ge, Interval, MaxLen, MinLen, MultipleOf +from numpy.typing import NDArray +from pydantic import ( + AllowInfNan, + Field, + TypeAdapter, + ValidationInfo, + field_validator, + model_validator, +) +from typing_extensions import Annotated, LiteralString, Self, assert_never + +from bioimageio.spec._internal.validated_string import ValidatedString + +from .._internal.common_nodes import ( + KwargsNode, + Node, + NodeWithExplicitlySetFields, + StringNode, +) +from .._internal.constants import SHA256_HINT +from .._internal.field_validation import ( + AfterValidator, + RestrictCharacters, + validate_unique_entries, +) +from .._internal.field_warning import issue_warning, warn +from .._internal.io import ( + BioimageioYamlContent, + WithSuffix, + download, + include_in_package_serializer, +) +from .._internal.io import FileDescr as FileDescr +from .._internal.io import Sha256 as Sha256 +from .._internal.io_basics import AbsoluteFilePath as AbsoluteFilePath +from .._internal.types import Datetime as Datetime +from .._internal.types import Identifier as Identifier +from .._internal.types import ( + ImportantFileSource, + LowerCaseIdentifierAnno, +) +from 
.._internal.types import LicenseId as LicenseId
+from .._internal.types import ModelId as ModelId
+from .._internal.types import NotEmpty as NotEmpty
+from .._internal.types import ResourceId as ResourceId
+from .._internal.url import HttpUrl as HttpUrl
+from .._internal.version_type import Version as Version
+from .._internal.warning_levels import ALERT, INFO
+from ..dataset.v0_2 import DatasetDescr as DatasetDescr
+from ..dataset.v0_2 import LinkedDataset as LinkedDataset
+from ..generic.v0_2 import AttachmentsDescr as AttachmentsDescr
+from ..generic.v0_2 import Author as Author
+from ..generic.v0_2 import BadgeDescr as BadgeDescr
+from ..generic.v0_2 import CiteEntry as CiteEntry
+from ..generic.v0_2 import Doi as Doi
+from ..generic.v0_2 import GenericModelDescrBase
+from ..generic.v0_2 import LinkedResource as LinkedResource
+from ..generic.v0_2 import Maintainer as Maintainer
+from ..generic.v0_2 import OrcidId as OrcidId
+from ..generic.v0_2 import RelativeFilePath as RelativeFilePath
+from ..generic.v0_2 import Uploader as Uploader
+from ..utils import load_array
+from ._v0_4_converter import convert_from_older_format
+
+AxesStr = Annotated[
+    str, RestrictCharacters("bitczyx"), AfterValidator(validate_unique_entries)
+]
+AxesInCZYX = Annotated[
+    str, RestrictCharacters("czyx"), AfterValidator(validate_unique_entries)
+]
+
+PostprocessingName = Literal[
+    "binarize",
+    "clip",
+    "scale_linear",
+    "sigmoid",
+    "zero_mean_unit_variance",
+    "scale_range",
+    "scale_mean_variance",
+]
+PreprocessingName = Literal[
+    "binarize",
+    "clip",
+    "scale_linear",
+    "sigmoid",
+    "zero_mean_unit_variance",
+    "scale_range",
+]
+
+TensorName = ValidatedString[LowerCaseIdentifierAnno]
+
+
+class CallableFromDepencency(StringNode):
+    _pattern = r"^.+\..+$"
+    _submodule_adapter = TypeAdapter(Identifier)
+
+    module_name: str
+
+    @field_validator("module_name", mode="after")
+    def check_submodules(cls, module_name: str) -> str:
+        for submod in module_name.split("."):
+            _ = cls._submodule_adapter.validate_python(submod)
+
+        return module_name
+
+    callable_name: Identifier
+
+    @classmethod
+    def _get_data(cls, valid_string_data: str):
+        *mods, callname = valid_string_data.split(".")
+        return dict(module_name=".".join(mods), callable_name=callname)
+
+
+class CallableFromFile(StringNode):
+    _pattern = r"^.+:.+$"
+    source_file: Annotated[
+        Union[HttpUrl, RelativeFilePath],
+        include_in_package_serializer,
+    ]
+    """∈📦 Python module that implements `callable_name`"""
+    callable_name: Identifier
+    """The Python identifier of the callable implemented in `source_file`."""
+
+    @classmethod
+    def _get_data(cls, valid_string_data: str):
+        *file_parts, callname = valid_string_data.split(":")
+        return dict(source_file=":".join(file_parts), callable_name=callname)
+
+
+CustomCallable = Union[CallableFromFile, CallableFromDepencency]
+
+
+class Dependencies(StringNode):
+    _pattern = r"^.+:.+$"
+    manager: Annotated[NotEmpty[str], Field(examples=["conda", "maven", "pip"])]
+    """Dependency manager"""
+
+    file: Annotated[
+        ImportantFileSource,
+        Field(examples=["environment.yaml", "pom.xml", "requirements.txt"]),
+    ]
+    """∈📦 Dependency file"""
+
+    @classmethod
+    def _get_data(cls, valid_string_data: str):
+        manager, *file_parts = valid_string_data.split(":")
+        return dict(manager=manager, file=":".join(file_parts))
+
+
+WeightsFormat = Literal[
+    "keras_hdf5",
+    "onnx",
+    "pytorch_state_dict",
+    "tensorflow_js",
+    "tensorflow_saved_model_bundle",
+    "torchscript",
+]
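+
+
+# Illustrative examples of the string forms the nodes above parse (a sketch
+# derived from their `_pattern`/`_get_data` definitions, not executed here):
+#
+#   CallableFromFile:       "my_function.py:MyNetworkClass"
+#       -> source_file="my_function.py", callable_name="MyNetworkClass"
+#   CallableFromDepencency: "my_module.submodule.get_my_model"
+#       -> module_name="my_module.submodule", callable_name="get_my_model"
+#   Dependencies:           "conda:environment.yaml"
+#       -> manager="conda", file="environment.yaml"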
+
+
+class WeightsDescr(Node):
+    keras_hdf5: Optional[KerasHdf5WeightsDescr] = None
+    onnx: Optional[OnnxWeightsDescr] = None
+    pytorch_state_dict: Optional[PytorchStateDictWeightsDescr] = None
+    tensorflow_js: Optional[TensorflowJsWeightsDescr] = None
+    tensorflow_saved_model_bundle: Optional[TensorflowSavedModelBundleWeightsDescr] = (
+        None
+    )
+    torchscript: Optional[TorchscriptWeightsDescr] = None
+
+    @model_validator(mode="after")
+    def check_one_entry(self) -> Self:
+        if all(
+            entry is None
+            for entry in [
+                self.keras_hdf5,
+                self.onnx,
+                self.pytorch_state_dict,
+                self.tensorflow_js,
+                self.tensorflow_saved_model_bundle,
+                self.torchscript,
+            ]
+        ):
+            raise ValueError("Missing weights entry")
+
+        return self
+
+
+class WeightsEntryDescrBase(FileDescr):
+    type: ClassVar[WeightsFormat]
+    weights_format_name: ClassVar[str]  # human readable
+
+    source: ImportantFileSource
+    """∈📦 The weights file."""
+
+    attachments: Annotated[
+        Union[AttachmentsDescr, None],
+        warn(None, "Weights entry depends on additional attachments.", ALERT),
+    ] = None
+    """Attachments that are specific to this weights entry."""
+
+    authors: Union[List[Author], None] = None
+    """Authors:
+    Either the person(s) that have trained this model, resulting in the original weights file
+    (if this is the initial weights entry, i.e. it does not have a `parent`),
+    or the person(s) who have converted the weights to this weights format
+    (if this is a child weights entry, i.e. it has a `parent` field).
+    """
+
+    dependencies: Annotated[
+        Optional[Dependencies],
+        warn(
+            None,
+            "Custom dependencies ({value}) specified. Avoid this whenever possible "
+            + "to allow execution in a wider range of software environments.",
+        ),
+        Field(
+            examples=[
+                "conda:environment.yaml",
+                "maven:./pom.xml",
+                "pip:./requirements.txt",
+            ]
+        ),
+    ] = None
+    """Dependency manager and dependency file, specified as `<dependency manager>:<relative file path>`."""
+
+    parent: Annotated[
+        Optional[WeightsFormat], Field(examples=["pytorch_state_dict"])
+    ] = None
+    """The source weights these weights were converted from.
+    For example, if a model's weights were converted from the `pytorch_state_dict` format to `torchscript`,
+    the `pytorch_state_dict` weights entry has no `parent` and is the parent of the `torchscript` weights.
+    All weight entries except one (the initial set of weights resulting from training the model)
+    need to have this field."""
+
+    @model_validator(mode="after")
+    def check_parent_is_not_self(self) -> Self:
+        if self.type == self.parent:
+            raise ValueError("Weights entry can't be its own parent.")
+
+        return self
+
+
+class KerasHdf5WeightsDescr(WeightsEntryDescrBase):
+    type = "keras_hdf5"
+    weights_format_name: ClassVar[str] = "Keras HDF5"
+    tensorflow_version: Optional[Version] = None
+    """TensorFlow version used to create these weights"""
+
+    @field_validator("tensorflow_version", mode="after")
+    @classmethod
+    def _tfv(cls, value: Any):
+        if value is None:
+            issue_warning(
+                "Missing TensorFlow version. Please specify the TensorFlow version"
+                + " these weights were created with.",
+                value=value,
+                severity=ALERT,
+            )
+        return value
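+
+
+# Sketch of a weights section using the parent/child convention described in
+# `WeightsEntryDescrBase.parent` (values are hypothetical):
+#
+#   weights:
+#     pytorch_state_dict:             # initial training output, no `parent`
+#       source: weights.pt
+#     torchscript:
+#       source: weights.torchscript
+#       parent: pytorch_state_dict    # converted from the state dict entry
+#
+# `WeightsDescr.check_one_entry` rejects a weights section without any entry.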
" + + "Please specify the ONNX opset version these weights were created" + + " with.", + value=value, + severity=ALERT, + ) + return value + + +class PytorchStateDictWeightsDescr(WeightsEntryDescrBase): + type = "pytorch_state_dict" + weights_format_name: ClassVar[str] = "Pytorch State Dict" + architecture: CustomCallable = Field( + examples=["my_function.py:MyNetworkClass", "my_module.submodule.get_my_model"] + ) + """callable returning a torch.nn.Module instance. + Local implementation: `:`. + Implementation in a dependency: `.<[dependency-module]>.`.""" + + architecture_sha256: Annotated[ + Optional[Sha256], + Field( + description=( + "The SHA256 of the architecture source file, if the architecture is not" + " defined in a module listed in `dependencies`\n" + ) + + SHA256_HINT, + ), + ] = None + """The SHA256 of the architecture source file, + if the architecture is not defined in a module listed in `dependencies`""" + + @model_validator(mode="after") + def check_architecture_sha256(self) -> Self: + if isinstance(self.architecture, CallableFromFile): + if self.architecture_sha256 is None: + raise ValueError( + "Missing required `architecture_sha256` for `architecture` with" + + " source file." + ) + elif self.architecture_sha256 is not None: + raise ValueError( + "Got `architecture_sha256` for architecture that does not have a source" + + " file." + ) + + return self + + kwargs: Dict[str, Any] = Field(default_factory=dict) + """key word arguments for the `architecture` callable""" + + pytorch_version: Optional[Version] = None + """Version of the PyTorch library used. + If `depencencies` is specified it should include pytorch and the verison has to match. + (`dependencies` overrules `pytorch_version`)""" + + @field_validator("pytorch_version", mode="after") + @classmethod + def _ptv(cls, value: Any): + if value is None: + issue_warning( + "Missing PyTorch version. Please specify the PyTorch version these" + + " PyTorch state dict weights were created with.", + value=value, + severity=ALERT, + ) + return value + + +class TorchscriptWeightsDescr(WeightsEntryDescrBase): + type = "torchscript" + weights_format_name: ClassVar[str] = "TorchScript" + pytorch_version: Optional[Version] = None + """Version of the PyTorch library used.""" + + @field_validator("pytorch_version", mode="after") + @classmethod + def _ptv(cls, value: Any): + if value is None: + issue_warning( + "Missing PyTorch version. Please specify the PyTorch version these" + + " Torchscript weights were created with.", + value=value, + severity=ALERT, + ) + return value + + +class TensorflowJsWeightsDescr(WeightsEntryDescrBase): + type = "tensorflow_js" + weights_format_name: ClassVar[str] = "Tensorflow.js" + tensorflow_version: Optional[Version] = None + """Version of the TensorFlow library used.""" + + @field_validator("tensorflow_version", mode="after") + @classmethod + def _tfv(cls, value: Any): + if value is None: + issue_warning( + "Missing TensorFlow version. Please specify the TensorFlow version" + + " these TensorflowJs weights were created with.", + value=value, + severity=ALERT, + ) + return value + + source: ImportantFileSource + """โˆˆ๐Ÿ“ฆ The multi-file weights. 
+
+
+class TensorflowJsWeightsDescr(WeightsEntryDescrBase):
+    type = "tensorflow_js"
+    weights_format_name: ClassVar[str] = "Tensorflow.js"
+    tensorflow_version: Optional[Version] = None
+    """Version of the TensorFlow library used."""
+
+    @field_validator("tensorflow_version", mode="after")
+    @classmethod
+    def _tfv(cls, value: Any):
+        if value is None:
+            issue_warning(
+                "Missing TensorFlow version. Please specify the TensorFlow version"
+                + " these TensorflowJs weights were created with.",
+                value=value,
+                severity=ALERT,
+            )
+        return value
+
+    source: ImportantFileSource
+    """∈📦 The multi-file weights.
+    All required files/folders should be included in a zip archive."""
+
+
+class TensorflowSavedModelBundleWeightsDescr(WeightsEntryDescrBase):
+    type = "tensorflow_saved_model_bundle"
+    weights_format_name: ClassVar[str] = "Tensorflow Saved Model"
+    tensorflow_version: Optional[Version] = None
+    """Version of the TensorFlow library used."""
+
+    @field_validator("tensorflow_version", mode="after")
+    @classmethod
+    def _tfv(cls, value: Any):
+        if value is None:
+            issue_warning(
+                "Missing TensorFlow version. Please specify the TensorFlow version"
+                + " these Tensorflow saved model bundle weights were created with.",
+                value=value,
+                severity=ALERT,
+            )
+        return value
+
+
+class ParameterizedInputShape(Node):
+    """A sequence of valid shapes given by `shape_k = min + k * step for k in {0, 1, ...}`."""
+
+    min: NotEmpty[List[int]]
+    """The minimum input shape"""
+
+    step: NotEmpty[List[int]]
+    """The minimum shape change"""
+
+    def __len__(self) -> int:
+        return len(self.min)
+
+    @model_validator(mode="after")
+    def matching_lengths(self) -> Self:
+        if len(self.min) != len(self.step):
+            raise ValueError("`min` and `step` required to have the same length")
+
+        return self
+
+
+class ImplicitOutputShape(Node):
+    """Output tensor shape depending on an input tensor shape.
+    `shape(output_tensor) = shape(input_tensor) * scale + 2 * offset`"""
+
+    reference_tensor: TensorName
+    """Name of the reference tensor."""
+
+    scale: NotEmpty[List[Union[float, None]]]
+    """output_pix/input_pix for each dimension.
+    'null' values indicate new dimensions, whose length is defined by 2*`offset`"""
+
+    offset: NotEmpty[List[Union[int, Annotated[float, MultipleOf(0.5)]]]]
+    """Position of origin with respect to the input."""
+
+    def __len__(self) -> int:
+        return len(self.scale)
+
+    @model_validator(mode="after")
+    def matching_lengths(self) -> Self:
+        if len(self.scale) != len(self.offset):
+            raise ValueError(
+                f"scale {self.scale} has to have same length as offset {self.offset}!"
+            )
+        # if we have an expanded dimension, make sure that its offset is not zero
+        for sc, off in zip(self.scale, self.offset):
+            if sc is None and not off:
+                raise ValueError("`offset` must not be zero if `scale` is none/zero")
+
+        return self
+
+
+class TensorDescrBase(Node):
+    name: TensorName
+    """Tensor name. No duplicates are allowed."""
+
+    description: str = ""
+
+    axes: AxesStr
+    """Axes identifying characters. Same length and order as the axes in `shape`.
+    | axis | description |
+    | --- | --- |
+    | b | batch (groups multiple samples) |
+    | i | instance/index/element |
+    | t | time |
+    | c | channel |
+    | z | spatial dimension z |
+    | y | spatial dimension y |
+    | x | spatial dimension x |
+    """
+
+    data_range: Optional[
+        Tuple[Annotated[float, AllowInfNan(True)], Annotated[float, AllowInfNan(True)]]
+    ] = None
+    """Tuple `(minimum, maximum)` specifying the allowed range of the data in this tensor.
+    If not specified, the full data range that can be expressed in `data_type` is allowed."""
+
+
+class ProcessingKwargs(KwargsNode):
+    """base class for pre-/postprocessing keyword arguments"""
+
+
+class ProcessingDescrBase(NodeWithExplicitlySetFields):
+    """processing base class"""
+
+    # name: Literal[PreprocessingName, PostprocessingName]  # todo: make abstract field
+    fields_to_set_explicitly: ClassVar[FrozenSet[LiteralString]] = frozenset({"name"})
+
+
+class BinarizeKwargs(ProcessingKwargs):
+    threshold: float
+    """The fixed threshold"""
+
+
+class BinarizeDescr(ProcessingDescrBase):
+    """Binarize the tensor with a fixed threshold.
+    Values above the threshold will be set to one, values below the threshold to zero.
+    """
+
+    name: Literal["binarize"] = "binarize"
+    kwargs: BinarizeKwargs
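+
+
+# Minimal usage sketch (illustrative, relying on the defaults defined above):
+#
+#   descr = BinarizeDescr(kwargs=BinarizeKwargs(threshold=0.5))
+#   # serializes with name "binarize"; consumers apply: out = tensor > 0.5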
+
+
+class ClipKwargs(ProcessingKwargs):
+    min: float
+    """minimum value for clipping"""
+    max: float
+    """maximum value for clipping"""
+
+
+class ClipDescr(ProcessingDescrBase):
+    """Set tensor values below min to min and above max to max."""
+
+    name: Literal["clip"] = "clip"
+
+    kwargs: ClipKwargs
+
+
+class ScaleLinearKwargs(ProcessingKwargs):
+    axes: Annotated[Optional[AxesInCZYX], Field(examples=["xy"])] = None
+    """The subset of axes to scale jointly.
+    For example xy to scale the two image axes for 2d data jointly."""
+
+    gain: Union[float, List[float]] = 1.0
+    """multiplicative factor"""
+
+    offset: Union[float, List[float]] = 0.0
+    """additive term"""
+
+    @model_validator(mode="after")
+    def either_gain_or_offset(self) -> Self:
+        if (
+            self.gain == 1.0
+            or isinstance(self.gain, list)
+            and all(g == 1.0 for g in self.gain)
+        ) and (
+            self.offset == 0.0
+            or isinstance(self.offset, list)
+            and all(off == 0.0 for off in self.offset)
+        ):
+            raise ValueError(
+                "Redundant linear scaling is not allowed. Set `gain` != 1.0 and/or"
+                + " `offset` != 0.0."
+            )
+
+        return self
+
+
+class ScaleLinearDescr(ProcessingDescrBase):
+    """Fixed linear scaling."""
+
+    name: Literal["scale_linear"] = "scale_linear"
+    kwargs: ScaleLinearKwargs
+
+
+class SigmoidDescr(ProcessingDescrBase):
+    """The logistic sigmoid function, a.k.a. expit function."""
+
+    name: Literal["sigmoid"] = "sigmoid"
+
+    @property
+    def kwargs(self) -> ProcessingKwargs:
+        """empty kwargs"""
+        return ProcessingKwargs()
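+
+
+# The two stateless operations above, written out (an illustrative summary,
+# not part of the module):
+#
+#   scale_linear: out = gain * tensor + offset   (over the joint `axes` subset)
+#   sigmoid:      out = 1 / (1 + exp(-tensor))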
+
+
+class ZeroMeanUnitVarianceKwargs(ProcessingKwargs):
+    mode: Literal["fixed", "per_dataset", "per_sample"] = "fixed"
+    """Mode for computing mean and variance.
+    | mode | description |
+    | ----------- | ------------------------------------ |
+    | fixed | Fixed values for mean and variance |
+    | per_dataset | Compute for the entire dataset |
+    | per_sample | Compute for each sample individually |
+    """
+    axes: Annotated[AxesInCZYX, Field(examples=["xy"])]
+    """The subset of axes to normalize jointly.
+    For example `xy` to normalize the two image axes for 2d data jointly."""
+
+    mean: Annotated[
+        Union[float, NotEmpty[List[float]], None], Field(examples=[(1.1, 2.2, 3.3)])
+    ] = None
+    """The mean value(s) to use for `mode: fixed`.
+    For example `[1.1, 2.2, 3.3]` in the case of a 3 channel image with `axes: xy`."""
+    # todo: check if means match input axes (for mode 'fixed')
+
+    std: Annotated[
+        Union[float, NotEmpty[List[float]], None], Field(examples=[(0.1, 0.2, 0.3)])
+    ] = None
+    """The standard deviation value(s) to use for `mode: fixed`. Analogous to `mean`."""
+
+    eps: Annotated[float, Interval(gt=0, le=0.1)] = 1e-6
+    """epsilon for numeric stability: `out = (tensor - mean) / (std + eps)`."""
+
+    @model_validator(mode="after")
+    def mean_and_std_match_mode(self) -> Self:
+        if self.mode == "fixed" and (self.mean is None or self.std is None):
+            raise ValueError("`mean` and `std` are required for `mode: fixed`.")
+        elif self.mode != "fixed" and (self.mean is not None or self.std is not None):
+            raise ValueError(f"`mean` and `std` not allowed for `mode: {self.mode}`")
+
+        return self
+
+
+class ZeroMeanUnitVarianceDescr(ProcessingDescrBase):
+    """Subtract mean and divide by variance."""
+
+    name: Literal["zero_mean_unit_variance"] = "zero_mean_unit_variance"
+    kwargs: ZeroMeanUnitVarianceKwargs
+
+
+class ScaleRangeKwargs(ProcessingKwargs):
+    mode: Literal["per_dataset", "per_sample"]
+    """Mode for computing percentiles.
+    | mode | description |
+    | ----------- | ------------------------------------ |
+    | per_dataset | compute for the entire dataset |
+    | per_sample | compute for each sample individually |
+    """
+    axes: Annotated[AxesInCZYX, Field(examples=["xy"])]
+    """The subset of axes to normalize jointly.
+    For example xy to normalize the two image axes for 2d data jointly."""
+
+    min_percentile: Annotated[Union[int, float], Interval(ge=0, lt=100)] = 0.0
+    """The lower percentile used for normalization."""
+
+    max_percentile: Annotated[Union[int, float], Interval(gt=1, le=100)] = 100.0
+    """The upper percentile used for normalization.
+    Has to be bigger than `min_percentile`.
+    The range is 1 to 100 instead of 0 to 100 to avoid mistakenly
+    accepting percentiles specified in the range 0.0 to 1.0."""
+
+    @model_validator(mode="after")
+    def min_smaller_max(self, info: ValidationInfo) -> Self:
+        if self.min_percentile >= self.max_percentile:
+            raise ValueError(
+                f"min_percentile {self.min_percentile} >= max_percentile"
+                + f" {self.max_percentile}"
+            )
+
+        return self
+
+    eps: Annotated[float, Interval(gt=0, le=0.1)] = 1e-6
+    """Epsilon for numeric stability.
+    `out = (tensor - v_lower) / (v_upper - v_lower + eps)`;
+    with `v_lower,v_upper` values at the respective percentiles."""
+
+    reference_tensor: Optional[TensorName] = None
+    """Tensor name to compute the percentiles from. Default: The tensor itself.
+    For any tensor in `inputs` only input tensor references are allowed.
+    For a tensor in `outputs` only input tensor references are allowed if `mode: per_dataset`."""
+
+
+class ScaleRangeDescr(ProcessingDescrBase):
+    """Scale with percentiles."""
+
+    name: Literal["scale_range"] = "scale_range"
+    kwargs: ScaleRangeKwargs
+
+
+class ScaleMeanVarianceKwargs(ProcessingKwargs):
+    mode: Literal["per_dataset", "per_sample"]
+    """Mode for computing mean and variance.
+    | mode | description |
+    | ----------- | ------------------------------------ |
+    | per_dataset | Compute for the entire dataset |
+    | per_sample | Compute for each sample individually |
+    """
+
+    reference_tensor: TensorName
+    """Name of tensor to match."""
+
+    axes: Annotated[Optional[AxesInCZYX], Field(examples=["xy"])] = None
+    """The subset of axes to scale jointly.
+    For example xy to normalize the two image axes for 2d data jointly.
+    Default: scale all non-batch axes jointly."""
+
+    eps: Annotated[float, Interval(gt=0, le=0.1)] = 1e-6
+    """Epsilon for numeric stability:
+    `out = (tensor - mean) / (std + eps) * (ref_std + eps) + ref_mean`."""
+
+
+class ScaleMeanVarianceDescr(ProcessingDescrBase):
+    """Scale the tensor s.t. its mean and variance match a reference tensor."""
+
+    name: Literal["scale_mean_variance"] = "scale_mean_variance"
+    kwargs: ScaleMeanVarianceKwargs
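+
+
+# Worked example for `scale_range` (illustrative numbers): with
+# min_percentile=1.0, max_percentile=99.0 and `mode: per_sample`, a consumer
+# computes v_lower/v_upper as the 1st/99th percentile of each sample and applies
+#
+#   out = (tensor - v_lower) / (v_upper - v_lower + eps)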
+
+
+PreprocessingDescr = Annotated[
+    Union[
+        BinarizeDescr,
+        ClipDescr,
+        ScaleLinearDescr,
+        SigmoidDescr,
+        ZeroMeanUnitVarianceDescr,
+        ScaleRangeDescr,
+    ],
+    Field(discriminator="name"),
+]
+PostprocessingDescr = Annotated[
+    Union[
+        BinarizeDescr,
+        ClipDescr,
+        ScaleLinearDescr,
+        SigmoidDescr,
+        ZeroMeanUnitVarianceDescr,
+        ScaleRangeDescr,
+        ScaleMeanVarianceDescr,
+    ],
+    Field(discriminator="name"),
+]
+
+
+class InputTensorDescr(TensorDescrBase):
+    data_type: Literal["float32", "uint8", "uint16"]
+    """For now an input tensor is expected to be given as `float32`.
+    The data flow in bioimage.io models is explained
+    [in this diagram.](https://docs.google.com/drawings/d/1FTw8-Rn6a6nXdkZ_SkMumtcjvur9mtIhRqLwnKqZNHM/edit)."""
+
+    shape: Annotated[
+        Union[Sequence[int], ParameterizedInputShape],
+        Field(
+            examples=[(1, 512, 512, 1), dict(min=(1, 64, 64, 1), step=(0, 32, 32, 0))]
+        ),
+    ]
+    """Specification of input tensor shape."""
+
+    preprocessing: List[PreprocessingDescr] = Field(default_factory=list)
+    """Description of how this input should be preprocessed."""
+
+    @model_validator(mode="after")
+    def zero_batch_step_and_one_batch_size(self) -> Self:
+        bidx = self.axes.find("b")
+        if bidx == -1:
+            return self
+
+        if isinstance(self.shape, ParameterizedInputShape):
+            step = self.shape.step
+            shape = self.shape.min
+            if step[bidx] != 0:
+                raise ValueError(
+                    "Input shape step has to be zero in the batch dimension (the batch"
+                    + " dimension can always be increased, but `step` should specify how"
+                    + " to increase the minimal shape to find the largest single batch"
+                    + " shape)"
+                )
+        else:
+            shape = self.shape
+
+        if shape[bidx] != 1:
+            raise ValueError("Input shape has to be 1 in the batch dimension b.")
+
+        return self
+
+    @model_validator(mode="after")
+    def validate_preprocessing_kwargs(self) -> Self:
+        for p in self.preprocessing:
+            kwargs_axes = p.kwargs.get("axes", "")
+            if not isinstance(kwargs_axes, str):
+                raise ValueError(
+                    f"Expected an `axes` string, but got {type(kwargs_axes)}"
+                )
+
+            if any(a not in self.axes for a in kwargs_axes):
+                raise ValueError("`kwargs.axes` needs to be subset of `axes`")
+
+        return self
+
+
+class OutputTensorDescr(TensorDescrBase):
+    data_type: Literal[
+        "float32",
+        "float64",
+        "uint8",
+        "int8",
+        "uint16",
+        "int16",
+        "uint32",
+        "int32",
+        "uint64",
+        "int64",
+        "bool",
+    ]
+    """Data type.
+    The data flow in bioimage.io models is explained
+    [in this diagram.](https://docs.google.com/drawings/d/1FTw8-Rn6a6nXdkZ_SkMumtcjvur9mtIhRqLwnKqZNHM/edit)."""
+
+    shape: Union[Sequence[int], ImplicitOutputShape]
+    """Output tensor shape."""
+
+    halo: Optional[Sequence[int]] = None
+    """The `halo` that should be cropped from the output tensor to avoid boundary effects.
+    The `halo` is to be cropped from both sides, i.e. `shape_after_crop = shape - 2 * halo`.
+    To document a `halo` that is already cropped by the model `shape.offset` has to be used instead."""
+
+    postprocessing: List[PostprocessingDescr] = Field(default_factory=list)
+    """Description of how this output should be postprocessed."""
+
+    @model_validator(mode="after")
+    def matching_halo_length(self) -> Self:
+        if self.halo and len(self.halo) != len(self.shape):
+            raise ValueError(
+                f"halo {self.halo} has to have same length as shape {self.shape}!"
+            )
+
+        return self
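+
+    # Halo sketch (illustrative numbers): for shape [1, 512, 512, 1] and
+    # halo [0, 32, 32, 0], a consumer crops 32 pixels from each side of y/x:
+    #   shape_after_crop = [s - 2 * h for s, h in zip(shape, halo)]  # [1, 448, 448, 1]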
+
+    @model_validator(mode="after")
+    def validate_postprocessing_kwargs(self) -> Self:
+        for p in self.postprocessing:
+            kwargs_axes = p.kwargs.get("axes", "")
+            if not isinstance(kwargs_axes, str):
+                raise ValueError(
+                    f"Expected an `axes` string, but got {type(kwargs_axes)}"
+                )
+
+            if any(a not in self.axes for a in kwargs_axes):
+                raise ValueError("`kwargs.axes` needs to be subset of `axes`")
+
+        return self
+
+
+KnownRunMode = Literal["deepimagej"]
+
+
+class RunMode(Node):
+    name: Annotated[
+        Union[KnownRunMode, str], warn(KnownRunMode, "Unknown run mode '{value}'.")
+    ]
+    """Run mode name"""
+
+    kwargs: Dict[str, Any] = Field(default_factory=dict)
+    """Run mode specific keyword arguments"""
+
+
+class LinkedModel(Node):
+    """Reference to a bioimage.io model."""
+
+    id: ModelId
+    """A valid model `id` from the bioimage.io collection."""
+
+    version_number: Optional[int] = None
+    """version number (n-th published version, not the semantic version) of the linked model"""
+
+
+class ModelDescr(GenericModelDescrBase, title="bioimage.io model specification"):
+    """Specification of the fields used in a bioimage.io-compliant RDF that describes AI models with pretrained weights.
+
+    These fields are typically stored in a YAML file which we call a model resource description file (model RDF).
+    """
+
+    format_version: Literal["0.4.10",] = "0.4.10"
+    """Version of the bioimage.io model description specification used.
+    When creating a new model always use the latest micro/patch version described here.
+    The `format_version` is important for any consumer software to understand how to parse the fields.
+    """
+
+    type: Literal["model"] = "model"
+    """Specialized resource type 'model'"""
+
+    id: Optional[ModelId] = None
+    """Model zoo (bioimage.io) wide, unique identifier (assigned by bioimage.io)"""
+
+    authors: NotEmpty[List[Author]]
+    """The authors are the creators of the model RDF and the primary points of contact."""
+
+    documentation: Annotated[
+        ImportantFileSource,
+        Field(
+            examples=[
+                "https://raw.githubusercontent.com/bioimage-io/spec-bioimage-io/main/example_specs/models/unet2d_nuclei_broad/README.md",
+                "README.md",
+            ],
+        ),
+    ]
+    """∈📦 URL or relative path to a markdown file with additional documentation.
+    The recommended documentation file name is `README.md`. An `.md` suffix is mandatory.
+    The documentation should include a '[#[#]]# Validation' (sub)section
+    with details on how to quantitatively validate the model on unseen data."""
+
+    inputs: NotEmpty[List[InputTensorDescr]]
+    """Describes the input tensors expected by this model."""
+
+    license: Annotated[
+        Union[LicenseId, str],
+        warn(LicenseId, "Unknown license id '{value}'."),
+        Field(examples=["CC-BY-4.0", "MIT", "BSD-2-Clause"]),
+    ]
+    """A [SPDX license identifier](https://spdx.org/licenses/).
+    We do not support custom licenses beyond the SPDX license list. If you need that, please
+    [open a GitHub issue](https://github.com/bioimage-io/spec-bioimage-io/issues/new/choose
+    ) to discuss your intentions with the community."""
+
+    name: Annotated[
+        str,
+        MinLen(1),
+        warn(MinLen(5), "Name shorter than 5 characters.", INFO),
+        warn(MaxLen(64), "Name longer than 64 characters.", INFO),
+    ]
+    """A human-readable name of this model.
+    It should be no longer than 64 characters and may only contain letters, numbers,
+    underscores, minus signs, or spaces."""
+
+    outputs: NotEmpty[List[OutputTensorDescr]]
+    """Describes the output tensors."""
+
+    @field_validator("inputs", "outputs")
+    @classmethod
+    def unique_tensor_descr_names(
+        cls, value: Sequence[Union[InputTensorDescr, OutputTensorDescr]]
+    ) -> Sequence[Union[InputTensorDescr, OutputTensorDescr]]:
+        unique_names = {str(v.name) for v in value}
+        if len(unique_names) != len(value):
+            raise ValueError("Duplicate tensor descriptor names")
+
+        return value
+
+    @model_validator(mode="after")
+    def unique_io_names(self) -> Self:
+        unique_names = {str(ss.name) for s in (self.inputs, self.outputs) for ss in s}
+        if len(unique_names) != (len(self.inputs) + len(self.outputs)):
+            raise ValueError("Duplicate tensor descriptor names across inputs/outputs")
+
+        return self
+
+    @model_validator(mode="after")
+    def minimum_shape2valid_output(self) -> Self:
+        tensors_by_name: Dict[
+            TensorName, Union[InputTensorDescr, OutputTensorDescr]
+        ] = {t.name: t for t in self.inputs + self.outputs}
+
+        for out in self.outputs:
+            if isinstance(out.shape, ImplicitOutputShape):
+                ndim_ref = len(tensors_by_name[out.shape.reference_tensor].shape)
+                ndim_out_ref = len(
+                    [scale for scale in out.shape.scale if scale is not None]
+                )
+                if ndim_ref != ndim_out_ref:
+                    expanded_dim_note = (
+                        " Note that expanded dimensions (`scale`: null) are not"
+                        + f" counted for {out.name}'s dimensionality here."
+                        if None in out.shape.scale
+                        else ""
+                    )
+                    raise ValueError(
+                        f"Referenced tensor '{out.shape.reference_tensor}' with"
+                        + f" {ndim_ref} dimensions does not match output tensor"
+                        + f" '{out.name}' with"
+                        + f" {ndim_out_ref} dimensions.{expanded_dim_note}"
+                    )
+
+            min_out_shape = self._get_min_shape(out, tensors_by_name)
+            if out.halo:
+                halo = out.halo
+                halo_msg = f" for halo {out.halo}"
+            else:
+                halo = [0] * len(min_out_shape)
+                halo_msg = ""
+
+            if any([s - 2 * h < 1 for s, h in zip(min_out_shape, halo)]):
+                raise ValueError(
+                    f"Minimal shape {min_out_shape} of output {out.name} is too"
+                    + f" small{halo_msg}."
+                )
+
+        return self
+
+    @classmethod
+    def _get_min_shape(
+        cls,
+        t: Union[InputTensorDescr, OutputTensorDescr],
+        tensors_by_name: Dict[TensorName, Union[InputTensorDescr, OutputTensorDescr]],
+    ) -> Sequence[int]:
+        """output with subtracted halo has to result in meaningful output even for the minimal input
+        see https://github.com/bioimage-io/spec-bioimage-io/issues/392
+        """
+        if isinstance(t.shape, collections.abc.Sequence):
+            return t.shape
+        elif isinstance(t.shape, ParameterizedInputShape):
+            return t.shape.min
+        elif isinstance(t.shape, ImplicitOutputShape):
+            pass
+        else:
+            assert_never(t.shape)
+
+        ref_shape = cls._get_min_shape(
+            tensors_by_name[t.shape.reference_tensor], tensors_by_name
+        )
+
+        if None not in t.shape.scale:
+            scale: Sequence[float] = t.shape.scale  # type: ignore
+        else:
+            expanded_dims = [idx for idx, sc in enumerate(t.shape.scale) if sc is None]
+            new_ref_shape: List[int] = []
+            for idx in range(len(t.shape.scale)):
+                ref_idx = idx - sum(int(exp < idx) for exp in expanded_dims)
+                new_ref_shape.append(1 if idx in expanded_dims else ref_shape[ref_idx])
+
+            ref_shape = new_ref_shape
+            assert len(ref_shape) == len(t.shape.scale)
+            scale = [0.0 if sc is None else sc for sc in t.shape.scale]
+
+        offset = t.shape.offset
+        assert len(offset) == len(scale)
+        return [int(rs * s + 2 * off) for rs, s, off in zip(ref_shape, scale, offset)]
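+
+    # Worked example for the two checks above (illustrative): an input with
+    # ParameterizedInputShape(min=[1, 64, 64, 1], step=[0, 32, 32, 0]) and an
+    # output with scale=[1, 1, 1, 1], offset=[0, 0, 0, 0], halo=[0, 8, 8, 0]
+    # gives min_out_shape = [1, 64, 64, 1]; since 64 - 2 * 8 = 48 >= 1,
+    # `minimum_shape2valid_output` passes.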
+
+    @model_validator(mode="after")
+    def validate_tensor_references_in_inputs(self) -> Self:
+        for t in self.inputs:
+            for proc in t.preprocessing:
+                if "reference_tensor" not in proc.kwargs:
+                    continue
+
+                ref_tensor = proc.kwargs["reference_tensor"]
+                if ref_tensor is not None and str(ref_tensor) not in {
+                    str(t.name) for t in self.inputs
+                }:
+                    raise ValueError(f"'{ref_tensor}' not found in inputs")
+
+                if ref_tensor == t.name:
+                    raise ValueError(
+                        f"invalid self reference for preprocessing of tensor {t.name}"
+                    )
+
+        return self
+
+    @model_validator(mode="after")
+    def validate_tensor_references_in_outputs(self) -> Self:
+        for t in self.outputs:
+            for proc in t.postprocessing:
+                if "reference_tensor" not in proc.kwargs:
+                    continue
+                ref_tensor = proc.kwargs["reference_tensor"]
+                if ref_tensor is not None and str(ref_tensor) not in {
+                    str(t.name) for t in self.inputs
+                }:
+                    raise ValueError(f"{ref_tensor} not found in inputs")
+
+        return self
+
+    packaged_by: List[Author] = Field(default_factory=list)
+    """The persons that have packaged and uploaded this model.
+    Only required if those persons differ from the `authors`."""
+
+    parent: Optional[LinkedModel] = None
+    """The model from which this model is derived, e.g. by fine-tuning the weights."""
+
+    @field_validator("parent", mode="before")
+    @classmethod
+    def ignore_url_parent(cls, parent: Any):
+        if isinstance(parent, dict):
+            return None
+
+        else:
+            return parent
+
+    run_mode: Optional[RunMode] = None
+    """Custom run mode for this model: for more complex prediction procedures like test time
+    data augmentation that currently cannot be expressed in the specification.
+    No standard run modes are defined yet."""
+
+    sample_inputs: List[ImportantFileSource] = Field(default_factory=list)
+    """∈📦 URLs/relative paths to sample inputs to illustrate possible inputs for the model,
+    for example stored as PNG or TIFF images.
+    The sample files primarily serve to inform a human user about an example use case."""
+
+    sample_outputs: List[ImportantFileSource] = Field(default_factory=list)
+    """∈📦 URLs/relative paths to sample outputs corresponding to the `sample_inputs`."""
+
+    test_inputs: NotEmpty[
+        List[Annotated[ImportantFileSource, WithSuffix(".npy", case_sensitive=True)]]
+    ]
+    """∈📦 Test input tensors compatible with the `inputs` description for a **single test case**.
+    This means if your model has more than one input, you should provide one URL/relative path for each input.
+    Each test input should be a file with an ndarray in
+    [numpy.lib file format](https://numpy.org/doc/stable/reference/generated/numpy.lib.format.html#module-numpy.lib.format).
+    The extension must be '.npy'."""
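+
+    # Illustrative single test case for a two-input model (paths hypothetical):
+    #   test_inputs: [test_input_0.npy, test_input_1.npy]
+    # Consumers can read these via `get_input_test_arrays` below, which downloads
+    # each file and loads it with numpy's `.npy` reader.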
+ + packaged_by: List[Author] = Field(default_factory=list) + """The persons that have packaged and uploaded this model. + Only required if those persons differ from the `authors`.""" + + parent: Optional[LinkedModel] = None + """The model from which this model is derived, e.g. by fine-tuning the weights.""" + + @field_validator("parent", mode="before") + @classmethod + def ignore_url_parent(cls, parent: Any): + if isinstance(parent, dict): + return None + + else: + return parent + + run_mode: Optional[RunMode] = None + """Custom run mode for this model: for more complex prediction procedures like test time + data augmentation that currently cannot be expressed in the specification. + No standard run modes are defined yet.""" + + sample_inputs: List[ImportantFileSource] = Field(default_factory=list) + """∈📦 URLs/relative paths to sample inputs to illustrate possible inputs for the model, + for example stored as PNG or TIFF images. + The sample files primarily serve to inform a human user about an example use case.""" + + sample_outputs: List[ImportantFileSource] = Field(default_factory=list) + """∈📦 URLs/relative paths to sample outputs corresponding to the `sample_inputs`.""" + + test_inputs: NotEmpty[ + List[Annotated[ImportantFileSource, WithSuffix(".npy", case_sensitive=True)]] + ] + """∈📦 Test input tensors compatible with the `inputs` description for a **single test case**. + This means if your model has more than one input, you should provide one URL/relative path for each input. + Each test input should be a file with an ndarray in + [numpy.lib file format](https://numpy.org/doc/stable/reference/generated/numpy.lib.format.html#module-numpy.lib.format). + The extension must be '.npy'.""" + + test_outputs: NotEmpty[ + List[Annotated[ImportantFileSource, WithSuffix(".npy", case_sensitive=True)]] + ] + """∈📦 Analogous to `test_inputs`.""" + + timestamp: Datetime + """Timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format + with a few restrictions listed [here](https://docs.python.org/3/library/datetime.html#datetime.datetime.fromisoformat).""" + + training_data: Union[LinkedDataset, DatasetDescr, None] = None + """The dataset used to train this model.""" + + weights: WeightsDescr + """The weights for this model. + Weights can be given for different formats, but should otherwise be equivalent. + The available weight formats determine which consumers can use this model.""" + + @model_validator(mode="before") + @classmethod + def _convert_from_older_format( + cls, data: BioimageioYamlContent, / + ) -> BioimageioYamlContent: + convert_from_older_format(data) + return data + + def get_input_test_arrays(self) -> List[NDArray[Any]]: + data = [load_array(download(ipt).path) for ipt in self.test_inputs] + assert all(isinstance(d, np.ndarray) for d in data) + return data + + def get_output_test_arrays(self) -> List[NDArray[Any]]: + data = [load_array(download(out).path) for out in self.test_outputs] + assert all(isinstance(d, np.ndarray) for d in data) + return data diff --git a/bioimageio/spec/model/v0_4/__init__.py b/bioimageio/spec/model/v0_4/__init__.py deleted file mode 100644 index 71ad3da34..000000000 --- a/bioimageio/spec/model/v0_4/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -from . import converters, raw_nodes, schema, utils -from .raw_nodes import FormatVersion - -try: - from typing import get_args -except ImportError: - from typing_extensions import get_args # type: ignore - - -format_version = get_args(FormatVersion)[-1] diff --git a/bioimageio/spec/model/v0_4/converters.py b/bioimageio/spec/model/v0_4/converters.py deleted file mode 100644 index 75428bfaf..000000000 --- a/bioimageio/spec/model/v0_4/converters.py +++ /dev/null @@ -1,114 +0,0 @@ -import copy -from typing import Any, Dict - -from marshmallow import missing - -from bioimageio.spec.rdf.v0_2.converters import remove_slash_from_names - - -def convert_model_from_v0_3_to_0_4_0(data: Dict[str, Any]) -> Dict[str, Any]: - from bioimageio.spec.model import v0_3 - - data = copy.deepcopy(data) - - data = v0_3.converters.maybe_convert(data) - v0_3.schema.Model().validate(data) - - data.pop("language", None) - data.pop("framework", None) - - architecture = data.pop("source", missing) - architecture_sha256 = data.pop("sha256", missing) - kwargs = data.pop("kwargs", missing) - pytorch_state_dict_weights_entry = data.get("weights", {}).get("pytorch_state_dict") - if pytorch_state_dict_weights_entry is not None: - if architecture is not missing: - pytorch_state_dict_weights_entry["architecture"] = architecture - - if architecture_sha256 is not missing: - pytorch_state_dict_weights_entry["architecture_sha256"] = architecture_sha256 - - if kwargs is not missing: - pytorch_state_dict_weights_entry["kwargs"] = kwargs - - torchscript_weights_entry = data.get("weights", {}).pop("pytorch_script", None) - if torchscript_weights_entry is not None: - data["weights"]["torchscript"] = torchscript_weights_entry - - data["format_version"] = "0.4.0" - - return data - - -def convert_model_from_v0_4_0_to_0_4_1(data: Dict[str, Any]) -> Dict[str, Any]: - data = dict(data) - - # move dependencies from root to pytorch_state_dict weights entry - deps = data.pop("dependencies", None) - weights = 
data.get("weights", {}) - if deps and weights and isinstance(weights, dict): - entry = weights.get("pytorch_state_dict") - if entry and isinstance(entry, dict): - entry["dependencies"] = deps - - data["format_version"] = "0.4.1" - return data - - -def convert_model_from_v0_4_4_to_0_4_5(data: Dict[str, Any]) -> Dict[str, Any]: - data = dict(data) - - parent = data.pop("parent", None) - if parent and "uri" in parent: - data["parent"] = parent["uri"] - - data["format_version"] = "0.4.5" - return data - - -def convert_model_from_v0_4_6_to_0_4_7(data: Dict[str, Any]) -> Dict[str, Any]: - data = dict(data) - - remove_slash_from_names(data) - - data["format_version"] = "0.4.7" - return data - - -def maybe_convert(data: Dict[str, Any]) -> Dict[str, Any]: - """auto converts model 'data' to newest format""" - major, minor, patch = map(int, data.get("format_version", "0.3.0").split(".")) - if major == 0 and minor < 4: - data = convert_model_from_v0_3_to_0_4_0(data) - - if data["format_version"] == "0.4.0": - data = convert_model_from_v0_4_0_to_0_4_1(data) - - if data["format_version"] in ("0.4.1", "0.4.2", "0.4.3"): - data["format_version"] = "0.4.4" - - if data["format_version"] == "0.4.4": - data = convert_model_from_v0_4_4_to_0_4_5(data) - - if data["format_version"] == "0.4.5": - data["format_version"] = "0.4.6" - - if data["format_version"] == "0.4.6": - data = convert_model_from_v0_4_6_to_0_4_7(data) - - if data["format_version"] == "0.4.7": - data["format_version"] = "0.4.8" - - if data["format_version"] == "0.4.8": - data["format_version"] = "0.4.9" - - # remove 'future' from config if no other than the used future entries exist - config = data.get("config", {}) - if config.get("future") == {}: - del config["future"] - - # remove 'config' if now empty - if data.get("config") == {}: - del data["config"] - - return data diff --git a/bioimageio/spec/model/v0_4/raw_nodes.py b/bioimageio/spec/model/v0_4/raw_nodes.py deleted file mode 100644 index 78597377e..000000000 --- a/bioimageio/spec/model/v0_4/raw_nodes.py +++ /dev/null @@ -1,142 +0,0 @@ -import packaging.version -from dataclasses import dataclass -from datetime import datetime -from pathlib import Path -from typing import Any, Dict, List, Union - -from marshmallow import missing -from marshmallow.utils import _Missing - -from bioimageio.spec.dataset.v0_2.raw_nodes import Dataset -from bioimageio.spec.model.v0_3.raw_nodes import ( - InputTensor, - KerasHdf5WeightsEntry as KerasHdf5WeightsEntry03, - OnnxWeightsEntry as OnnxWeightsEntry03, - OutputTensor, - Postprocessing, - PostprocessingName, - Preprocessing, - PreprocessingName, - RunMode, - TensorflowJsWeightsEntry as TensorflowJsWeightsEntry03, - TensorflowSavedModelBundleWeightsEntry as TensorflowSavedModelBundleWeightsEntry03, - _WeightsEntryBase as _WeightsEntryBase03, -) -from bioimageio.spec.rdf.v0_2.raw_nodes import Author, Maintainer, RDF_Base as _RDF -from bioimageio.spec.shared.raw_nodes import ( - Dependencies, - ImplicitOutputShape, - ImportableModule, - ImportableSourceFile, - ParametrizedInputShape, - RawNode, - URI, -) - -try: - from typing import Literal -except ImportError: - from typing_extensions import Literal # type: ignore - -# reassign to use imported classes -ImplicitOutputShape = ImplicitOutputShape -InputTensor = InputTensor -Maintainer = Maintainer -OutputTensor = OutputTensor -ParametrizedInputShape = ParametrizedInputShape -Postprocessing = Postprocessing -PostprocessingName = PostprocessingName -Preprocessing = Preprocessing -PreprocessingName = 
PreprocessingName - -FormatVersion = Literal[ - "0.4.0", "0.4.1", "0.4.2", "0.4.3", "0.4.4", "0.4.5", "0.4.6", "0.4.7", "0.4.8", "0.4.9" -] # newest format needs to be last (used in __init__.py) -WeightsFormat = Literal[ - "pytorch_state_dict", "torchscript", "keras_hdf5", "tensorflow_js", "tensorflow_saved_model_bundle", "onnx" -] - -ImportableSource = Union[ImportableSourceFile, ImportableModule] - - -@dataclass -class _WeightsEntryBase(_WeightsEntryBase03): - dependencies: Union[_Missing, Dependencies] = missing - - -@dataclass -class KerasHdf5WeightsEntry(_WeightsEntryBase, KerasHdf5WeightsEntry03): - pass - - -@dataclass -class OnnxWeightsEntry(_WeightsEntryBase, OnnxWeightsEntry03): - pass - - -@dataclass -class PytorchStateDictWeightsEntry(_WeightsEntryBase): - weights_format_name = "Pytorch State Dict" - architecture: ImportableSource = missing - architecture_sha256: Union[_Missing, str] = missing - kwargs: Union[_Missing, Dict[str, Any]] = missing - pytorch_version: Union[_Missing, packaging.version.Version] = missing - - -@dataclass -class TensorflowJsWeightsEntry(_WeightsEntryBase, TensorflowJsWeightsEntry03): - pass - - -@dataclass -class TensorflowSavedModelBundleWeightsEntry(_WeightsEntryBase, TensorflowSavedModelBundleWeightsEntry03): - pass - - -@dataclass -class TorchscriptWeightsEntry(_WeightsEntryBase): - weights_format_name = "Torchscript" - pytorch_version: Union[_Missing, packaging.version.Version] = missing - - -WeightsEntry = Union[ - KerasHdf5WeightsEntry, - OnnxWeightsEntry, - PytorchStateDictWeightsEntry, - TensorflowJsWeightsEntry, - TensorflowSavedModelBundleWeightsEntry, - TorchscriptWeightsEntry, -] - - -@dataclass -class LinkedDataset(RawNode): - id: str - - -@dataclass -class ModelParent(RawNode): - id: Union[_Missing, str] = missing - uri: Union[_Missing, URI, Path] = missing - sha256: Union[_Missing, str] = missing - - -@dataclass -class Model(_RDF): - _include_in_package = ("covers", "documentation", "test_inputs", "test_outputs", "sample_inputs", "sample_outputs") - - format_version: FormatVersion = missing - inputs: List[InputTensor] = missing - license: str = missing - outputs: List[OutputTensor] = missing - packaged_by: Union[_Missing, List[Author]] = missing - parent: Union[_Missing, ModelParent] = missing - run_mode: Union[_Missing, RunMode] = missing - sample_inputs: Union[_Missing, List[Union[URI, Path]]] = missing - sample_outputs: Union[_Missing, List[Union[URI, Path]]] = missing - test_inputs: List[Union[URI, Path]] = missing - test_outputs: List[Union[URI, Path]] = missing - timestamp: datetime = missing - training_data: Union[_Missing, Dataset, LinkedDataset] = missing - type: Literal["model"] = missing - weights: Dict[WeightsFormat, WeightsEntry] = missing diff --git a/bioimageio/spec/model/v0_4/schema.py b/bioimageio/spec/model/v0_4/schema.py deleted file mode 100644 index 877430f77..000000000 --- a/bioimageio/spec/model/v0_4/schema.py +++ /dev/null @@ -1,705 +0,0 @@ -import typing -from copy import deepcopy -from types import ModuleType - -import numpy -from marshmallow import ( - RAISE, - ValidationError, - missing, - pre_load, - validates, - validates_schema, -) - -from bioimageio.spec.dataset.v0_2.schema import Dataset as _Dataset -from bioimageio.spec.model.v0_3.schema import ( - KerasHdf5WeightsEntry as KerasHdf5WeightsEntry03, -) -from bioimageio.spec.model.v0_3.schema import ( - OnnxWeightsEntry as OnnxWeightsEntry03, -) -from bioimageio.spec.model.v0_3.schema import ( - Postprocessing as Postprocessing03, -) -from 
bioimageio.spec.model.v0_3.schema import ( - Preprocessing as Preprocessing03, -) -from bioimageio.spec.model.v0_3.schema import ( - TensorflowJsWeightsEntry as TensorflowJsWeightsEntry03, -) -from bioimageio.spec.model.v0_3.schema import ( - TensorflowSavedModelBundleWeightsEntry as TensorflowSavedModelBundleWeightsEntry03, -) -from bioimageio.spec.model.v0_3.schema import ( - _common_sha256_hint, -) -from bioimageio.spec.model.v0_3.schema import ( - _WeightsEntryBase as _WeightsEntryBase03, -) -from bioimageio.spec.rdf import v0_2 as rdf -from bioimageio.spec.shared import LICENSES, field_validators, fields -from bioimageio.spec.shared.common import get_args, get_args_flat -from bioimageio.spec.shared.schema import ( - ImplicitOutputShape, - ParametrizedInputShape, - SharedBioImageIOSchema, -) - -from . import raw_nodes - - -class _BioImageIOSchema(SharedBioImageIOSchema): - raw_nodes: typing.ClassVar[ModuleType] = raw_nodes - - -class _TensorBase(_BioImageIOSchema): - name = fields.String( - required=True, - validate=field_validators.Predicate("isidentifier"), - bioimageio_description="Tensor name. No duplicates are allowed.", - ) - description = fields.String() - axes = fields.Axes( - required=True, - bioimageio_description="""Axes identifying characters from: bitczyx. Same length and order as the axes in `shape`. - -| character | description | -| --- | --- | -| b | batch (groups multiple samples) | -| i | instance/index/element | -| t | time | -| c | channel | -| z | spatial dimension z | -| y | spatial dimension y | -| x | spatial dimension x |""", - ) - data_type = fields.String( - required=True, - bioimageio_description="The data type of this tensor. For inputs, only `float32` is allowed and the consumer " - "software needs to ensure that the correct data type is passed here. For outputs can be any of `float32, " - "float64, (u)int8, (u)int16, (u)int32, (u)int64`. The data flow in bioimage.io models is explained " - "[in this diagram.](https://docs.google.com/drawings/d/1FTw8-Rn6a6nXdkZ_SkMumtcjvur9mtIhRqLwnKqZNHM/edit).", - ) - data_range = fields.Tuple( - (fields.Float(allow_nan=True), fields.Float(allow_nan=True)), - bioimageio_description="Tuple `(minimum, maximum)` specifying the allowed range of the data in this tensor. " - "If not specified, the full data range that can be expressed in `data_type` is allowed.", - ) - shape: fields.Union - - processing_name: str - - @validates_schema - def validate_processing_kwargs(self, data, **kwargs): - axes = data.get("axes", []) - processing_list = data.get(self.processing_name, []) - for processing in processing_list: - kwargs = processing.kwargs or {} - kwarg_axes = kwargs.get("axes", "") - if any(a not in axes for a in kwarg_axes): - raise ValidationError("`kwargs.axes` needs to be subset of axes") - - -class Preprocessing(Preprocessing03): - kwargs = fields.Kwargs( - bioimageio_description=f"Key word arguments as described in [preprocessing spec]" - f"(https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/preprocessing_spec_" - f"{'_'.join(get_args(raw_nodes.FormatVersion)[-1].split('.')[:2])}.md)." - ) - - -class Postprocessing(Postprocessing03): - kwargs = fields.Kwargs( - bioimageio_description=f"Key word arguments as described in [postprocessing spec]" - f"(https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/postprocessing_spec_" - f"{'_'.join(get_args(raw_nodes.FormatVersion)[-1].split('.')[:2])}.md)." 
- ) - - -class InputTensor(_TensorBase): - shape = fields.Union( - [ - fields.ExplicitShape( - bioimageio_description="Exact shape with same length as `axes`, e.g. `shape: [1, 512, 512, 1]`" - ), - fields.Nested( - ParametrizedInputShape(), - bioimageio_description="A sequence of valid shapes given by `shape = min + k * step for k in {0, 1, ...}`.", - ), - ], - required=True, - bioimageio_description="Specification of input tensor shape.", - ) - preprocessing = fields.List( - fields.Nested(Preprocessing()), bioimageio_description="Description of how this input should be preprocessed." - ) - processing_name = "preprocessing" - - @validates_schema - def zero_batch_step_and_one_batch_size(self, data, **kwargs): - axes = data.get("axes") - shape = data.get("shape") - - if axes is None or shape is None: - raise ValidationError("Failed to validate batch_step=0 and batch_size=1 due to other validation errors") - - axes = data["axes"] - shape = data["shape"] - - bidx = axes.find("b") - if bidx == -1: - return - - if isinstance(shape, raw_nodes.ParametrizedInputShape): - step = shape.step - shape = shape.min - - elif isinstance(shape, list): - step = [0] * len(shape) - else: - raise ValidationError(f"Unknown shape type {type(shape)}") - - if step[bidx] != 0: - raise ValidationError( - "Input shape step has to be zero in the batch dimension (the batch dimension can always be " - "increased, but `step` should specify how to increase the minimal shape to find the largest " - "single batch shape)" - ) - - if shape[bidx] != 1: - raise ValidationError("Input shape has to be 1 in the batch dimension b.") - - -class OutputTensor(_TensorBase): - shape = fields.Union( - [ - fields.ExplicitShape(), - fields.Nested( - ImplicitOutputShape(), - bioimageio_description="In reference to the shape of an input tensor, the shape of the output " - "tensor is `shape = shape(input_tensor) * scale + 2 * offset`.", - ), - ], - required=True, - bioimageio_description="Specification of output tensor shape.", - ) - halo = fields.List( - fields.Integer(), - bioimageio_description="The halo to crop from the output tensor (for example to crop away boundary effects or " - "for tiling). The halo should be cropped from both sides, i.e. `shape_after_crop = shape - 2 * halo`. The " - "`halo` is not cropped by the bioimage.io model, but is left to be cropped by the consumer software. Use " - "`shape:offset` if the model output itself is cropped and input and output shapes not fixed.", - ) - postprocessing = fields.List( - fields.Nested(Postprocessing()), - bioimageio_description="Description of how this output should be postprocessed.", - ) - processing_name = "postprocessing" - - @validates_schema - def matching_halo_length(self, data, **kwargs): - shape = data.get("shape") - halo = data.get("halo") - if halo is None: - return - elif isinstance(shape, list) or isinstance(shape, raw_nodes.ImplicitOutputShape): - if shape is None or len(halo) != len(shape): - raise ValidationError(f"halo {halo} has to have same length as shape {shape}!") - else: - raise NotImplementedError(type(shape)) - - -class _WeightsEntryBase(_WeightsEntryBase03): - raw_nodes: typing.ClassVar[ModuleType] = raw_nodes - dependencies = fields.Dependencies( - bioimageio_description="Dependency manager and dependency file, specified as `:`. For example: 'conda:./environment.yaml', 'maven:./pom.xml', or 'pip:./requirements.txt'. " - "These dependencies are only used for the specified weight format." 
- ) - - -class KerasHdf5WeightsEntry(KerasHdf5WeightsEntry03, _WeightsEntryBase): - pass - - -class OnnxWeightsEntry(OnnxWeightsEntry03, _WeightsEntryBase): - pass - - -class PytorchStateDictWeightsEntry(_WeightsEntryBase): - bioimageio_description = "PyTorch state dictionary weights format" - weights_format = fields.String(validate=field_validators.Equal("pytorch_state_dict"), required=True, load_only=True) - architecture = fields.ImportableSource( - required=True, - bioimageio_description="Source code of the model architecture that either points to a " - "local implementation: `:` or the " - "implementation in an available dependency: `..`.\nFor example: " - "`my_function.py:MyImplementation` or `bioimageio.core.some_module.some_class_or_function`.", - ) - architecture_sha256 = fields.String( - bioimageio_maybe_required=True, - validate=field_validators.Length(equal=64), - bioimageio_description="This field is only required if the architecture points to a source file. " - "SHA256 checksum of the model source code file." - + _common_sha256_hint.replace(" ", " "), # sha256 hint with one more intend level - ) - kwargs = fields.Kwargs( - bioimageio_description="Keyword arguments for the implementation specified by `architecture`." - ) - pytorch_version = fields.Version() - - @validates_schema - def sha_for_source_code_file(self, data, **kwargs): - arch = data.get("architecture") - if isinstance(arch, raw_nodes.ImportableModule): - return - elif isinstance(arch, raw_nodes.ImportableSourceFile): - sha = data.get("architecture_sha256") - if sha is None: - raise ValidationError( - "When specifying 'architecture' with a callable from a source file, " - "the corresponding 'architecture_sha256' field is required." - ) - - -class TensorflowJsWeightsEntry(TensorflowJsWeightsEntry03, _WeightsEntryBase): - pass - - -class TensorflowSavedModelBundleWeightsEntry(TensorflowSavedModelBundleWeightsEntry03, _WeightsEntryBase): - pass - - -class Dataset(_Dataset): - short_bioimageio_description = "in-place definition of [dataset RDF](https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/dataset_spec_0_2.md)" - - -class TorchscriptWeightsEntry(_WeightsEntryBase): - raw_nodes: typing.ClassVar[ModuleType] = raw_nodes - - bioimageio_description = "Torchscript weights format" - weights_format = fields.String(validate=field_validators.Equal("torchscript"), required=True, load_only=True) - pytorch_version = fields.Version() - - -WeightsEntry = typing.Union[ - KerasHdf5WeightsEntry, - OnnxWeightsEntry, - PytorchStateDictWeightsEntry, - TensorflowJsWeightsEntry, - TensorflowSavedModelBundleWeightsEntry, - TorchscriptWeightsEntry, -] - - -class RunMode(_BioImageIOSchema): - name = fields.String(required=True, bioimageio_description="The name of the `run_mode`") - kwargs = fields.Kwargs() - - @validates("name") - def warn_on_unrecognized_run_mode(self, value: str): - if isinstance(value, str): - self.warn("name", f"Unrecognized run mode '{value}'") - - -class LinkedDataset(_BioImageIOSchema): - id = fields.String(bioimageio_description="dataset id") - - -class ModelParent(_BioImageIOSchema): - id = fields.BioImageIO_ID(resource_type="model") - uri = fields.Union( - [fields.URI(), fields.Path()], bioimageio_description="URL or local relative path of a model RDF" - ) - sha256 = fields.SHA256(bioimageio_description="Hash of the parent model RDF. 
Note: the hash is not validated") - - @validates_schema - def id_xor_uri(self, data, **kwargs): - if ("id" in data) == ("uri" in data): - raise ValidationError("Either 'id' or 'uri' are required (not both).") - - -class Model(rdf.schema.RDF): - raw_nodes: typing.ClassVar = raw_nodes - - class Meta: - unknown = RAISE - exclude = ("source",) # while RDF does have a source field, Model does not - - bioimageio_description = f"""# BioImage.IO Model Resource Description File Specification {get_args(raw_nodes.FormatVersion)[-1]} -This specification defines the fields used in a BioImage.IO-compliant resource description file (`RDF`) for describing AI models with pretrained weights. -These fields are typically stored in YAML files which we call Model Resource Description Files or `model RDF`. -The model RDFs can be downloaded or uploaded to the bioimage.io website, produced or consumed by BioImage.IO-compatible consumers(e.g. image analysis software or other website). - -The model RDF YAML file contains mandatory and optional fields. In the following description, optional fields are indicated by _optional_. -_optional*_ with an asterisk indicates the field is optional depending on the value in another field. -""" - # todo: sync authors with RDF - authors = fields.List( - fields.Nested(rdf.schema.Author()), - validate=field_validators.Length(min=1), - required=True, - bioimageio_description=rdf.schema.RDF.authors_bioimageio_description, - ) - - badges = missing # todo: allow badges for Model (RDF has it) - cite = fields.List( - fields.Nested(rdf.schema.CiteEntry()), - required=False, - bioimageio_description=rdf.schema.RDF.cite_bioimageio_description, - ) - - @validates_schema - def warn_on_missing_cite(self, data: dict, **kwargs): - if "cite" not in data: - self.warn("cite", "missing") - - config = fields.YamlDict( - bioimageio_description=rdf.schema.RDF.config_bioimageio_description - + """ -For example: -```yaml -config: - # custom config for DeepImageJ, see https://github.com/bioimage-io/configuration/issues/23 - deepimagej: - model_keys: - # In principle the tag "SERVING" is used in almost every tf model - model_tag: tf.saved_model.tag_constants.SERVING - # Signature definition to call the model. Again "SERVING" is the most general - signature_definition: tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY - test_information: - input_size: [2048x2048] # Size of the input images - output_size: [1264x1264 ]# Size of all the outputs - device: cpu # Device used. In principle either cpu or GPU - memory_peak: 257.7 Mb # Maximum memory consumed by the model in the device - runtime: 78.8s # Time it took to run the model - pixel_size: [9.658E-4ยตmx9.658E-4ยตm] # Size of the pixels of the input -``` -""" - ) - - documentation = fields.Union( - [ - fields.URL(), - fields.Path(), - ], - required=True, - bioimageio_description="Relative path or URL to file with additional documentation in markdown. " - "The file must be in markdown format with `.md` file name extension. " - "It is recommended to use `README.md` as the documentation name. " - "The documentation should include a (sub)section '[#[#]]# Validation' with details on how to quantitatively " - "validate the model on unseen data. ", - ) - - format_version = fields.String( - validate=field_validators.OneOf(get_args_flat(raw_nodes.FormatVersion)), - required=True, - bioimageio_description_order=0, - bioimageio_description=f"""Version of the BioImage.IO Model Resource Description File Specification used. 
-This is mandatory, and important for the consumer software to verify before parsing the fields. -The recommended behavior for the implementation is to keep backward compatibility and throw an error if the model yaml -is in an unsupported format version. The current format version described here is -{get_args(raw_nodes.FormatVersion)[-1]}""", - ) - - git_repo = fields.URL( - bioimageio_description=rdf.schema.RDF.git_repo_bioimageio_description - + "If the model is contained in a subfolder of a git repository, then a url to the exact folder" - + "(which contains the configuration yaml file) should be used." - ) - - inputs = fields.List( - fields.Nested(InputTensor()), - validate=field_validators.Length(min=1), - required=True, - bioimageio_description="Describes the input tensors expected by this model.", - ) - - @validates("inputs") - def no_duplicate_input_tensor_names(self, value: typing.List[raw_nodes.InputTensor]): - if not isinstance(value, list) or not all(isinstance(v, raw_nodes.InputTensor) for v in value): - raise ValidationError("Could not check for duplicate input tensor names due to another validation error.") - - names = [t.name for t in value] - if len(names) > len(set(names)): - raise ValidationError("Duplicate input tensor names are not allowed.") - - license = fields.String( - validate=field_validators.OneOf(LICENSES), - required=True, - bioimageio_description=rdf.schema.RDF.license_bioimageio_description, - ) - - name = fields.Name( - required=True, - bioimageio_description="Name of this model. It should be human-readable and only contain letters, numbers, " - "underscore '_', minus '-' or spaces and not be longer than 64 characters.", - ) - - outputs = fields.List( - fields.Nested(OutputTensor()), - validate=field_validators.Length(min=1), - bioimageio_description="Describes the output tensors from this model.", - ) - - @validates("outputs") - def no_duplicate_output_tensor_names(self, value: typing.List[raw_nodes.OutputTensor]): - if not isinstance(value, list) or not all(isinstance(v, raw_nodes.OutputTensor) for v in value): - raise ValidationError("Could not check for duplicate output tensor names due to another validation error.") - - names = [t["name"] if isinstance(t, dict) else t.name for t in value] - if len(names) > len(set(names)): - raise ValidationError("Duplicate output tensor names are not allowed.") - - @validates_schema - def inputs_and_outputs(self, data, **kwargs) -> None: - ipts: typing.List[raw_nodes.InputTensor] = data.get("inputs") - outs: typing.List[raw_nodes.OutputTensor] = data.get("outputs") - if any( - [ - not isinstance(ipts, list), - not isinstance(outs, list), - not all(isinstance(v, raw_nodes.InputTensor) for v in ipts), - not all(isinstance(v, raw_nodes.OutputTensor) for v in outs), - ] - ): - raise ValidationError("Could not check for duplicate tensor names due to another validation error.") - - # no duplicate tensor names - names = [t.name for t in ipts + outs] # type: ignore - if len(names) > len(set(names)): - raise ValidationError("Duplicate tensor names are not allowed.") - - tensors_by_name: typing.Dict[str, typing.Union[raw_nodes.InputTensor, raw_nodes.OutputTensor]] = { - t.name: t for t in ipts + outs # type: ignore - } - - # minimum shape leads to valid output: - # output with subtracted halo has to result in meaningful output even for the minimal input - # see https://github.com/bioimage-io/spec-bioimage-io/issues/392 - def get_min_shape(t) -> numpy.ndarray: - if isinstance(t.shape, raw_nodes.ParametrizedInputShape): - shape = 
numpy.array(t.shape.min) - elif isinstance(t.shape, raw_nodes.ImplicitOutputShape): - scale = list(t.shape.scale) - ref_shape = get_min_shape(tensors_by_name[t.shape.reference_tensor]) - - if any(sc is None for sc in scale): - expanded_dims = tuple(idx for idx, sc in enumerate(scale) if sc is None) - new_ref_shape = [] - for idx in range(len(scale)): - ref_idx = idx - sum(int(exp < idx) for exp in expanded_dims) - new_ref_shape.append(1 if idx in expanded_dims else ref_shape[ref_idx]) - ref_shape = numpy.array(new_ref_shape) - assert len(ref_shape) == len(scale) - scale = [0.0 if sc is None else sc for sc in scale] - - offset = numpy.array(t.shape.offset) - shape = ref_shape * numpy.array(scale) + 2 * offset - else: - shape = numpy.array(t.shape) - - return shape - - for out in outs: - if isinstance(out.shape, raw_nodes.ImplicitOutputShape): - ndim_ref = len(tensors_by_name[out.shape.reference_tensor].shape) - ndim_out_ref = len([scale for scale in out.shape.scale if scale is not None]) - if ndim_ref != ndim_out_ref: - expanded_dim_note = ( - f" Note that expanded dimensions (scale: null) are not counted for {out.name}'s dimensionality." - if None in out.shape.scale - else "" - ) - raise ValidationError( - f"Referenced tensor {out.shape.reference_tensor} " - f"with {ndim_ref} dimensions does not match " - f"output tensor {out.name} with {ndim_out_ref} dimensions.{expanded_dim_note}" - ) - - min_out_shape = get_min_shape(out) - if out.halo: - halo = out.halo - halo_msg = f" for halo {out.halo}" - else: - halo = [0] * len(min_out_shape) - halo_msg = "" - - if any([s - 2 * h < 1 for s, h in zip(min_out_shape, halo)]): - raise ValidationError(f"Minimal shape {min_out_shape} of output {out.name} is too small{halo_msg}.") - - packaged_by = fields.List( - fields.Nested(rdf.schema.Author()), - bioimageio_description="The persons that have packaged and uploaded this model. Only needs to be specified if " - "different from `authors` in root or any entry in `weights`.", - ) - - parent = fields.Nested( - ModelParent(), - bioimageio_description="The model from which this model is derived, e.g. by fine-tuning the weights.", - ) - - run_mode = fields.Nested( - RunMode(), - bioimageio_description="Custom run mode for this model: for more complex prediction procedures like test time " - "data augmentation that currently cannot be expressed in the specification. " - "No standard run modes are defined yet.", - ) - - sample_inputs = fields.List( - fields.Union([fields.URI(), fields.Path()]), - validate=field_validators.Length(min=1), - bioimageio_description="List of URIs/local relative paths to sample inputs to illustrate possible inputs for " - "the model, for example stored as png or tif images. " - "The model is not tested with these sample files that serve to inform a human user about an example use case.", - ) - sample_outputs = fields.List( - fields.Union([fields.URI(), fields.Path()]), - validate=field_validators.Length(min=1), - bioimageio_description="List of URIs/local relative paths to sample outputs corresponding to the " - "`sample_inputs`.", - ) - - test_inputs = fields.List( - fields.Union([fields.URI(), fields.Path()]), - validate=field_validators.Length(min=1), - required=True, - bioimageio_description="List of URIs or local relative paths to test inputs as described in inputs for " - "**a single test case**. " - "This means if your model has more than one input, you should provide one URI for each input." 
- "Each test input should be a file with a ndarray in " - "[numpy.lib file format](https://numpy.org/doc/stable/reference/generated/numpy.lib.format.html#module-numpy.lib.format)." - "The extension must be '.npy'.", - ) - test_outputs = fields.List( - fields.Union([fields.URI(), fields.Path()]), - validate=field_validators.Length(min=1), - required=True, - bioimageio_description="Analog to test_inputs.", - ) - - timestamp = fields.DateTime( - required=True, - bioimageio_description="Timestamp of the initial creation of this model in [ISO 8601]" - "(#https://en.wikipedia.org/wiki/ISO_8601) format.", - ) - - training_data = fields.Union([fields.Nested(Dataset()), fields.Nested(LinkedDataset())]) - - weights = fields.Dict( - fields.String( - validate=field_validators.OneOf(get_args(raw_nodes.WeightsFormat)), - required=True, - bioimageio_description="Format of this set of weights. " - f"One of: {', '.join(get_args(raw_nodes.WeightsFormat))}", - ), - fields.Union( - [fields.Nested(we()) for we in get_args(WeightsEntry)], - short_bioimageio_description=( - "The weights for this model. Weights can be given for different formats, but should " - "otherwise be equivalent. " - "See [weight_formats_spec_0_4.md]" - "(https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/weight_formats_spec_0_4.md) " - "for the required and optional fields per weight format. " - "The available weight formats determine which consumers can use this model." - ), - ), - required=True, - ) - - @pre_load - def add_weights_format_key_to_weights_entry_value(self, data: dict, many=False, partial=False, **kwargs): - data = deepcopy(data) # Schema.validate() calls pre_load methods, thus we should not modify the input data - if many or partial: - raise NotImplementedError - - for weights_format, weights_entry in data.get("weights", {}).items(): - if "weights_format" in weights_entry: - raise ValidationError(f"Got unexpected key 'weights_format' in weights entry {weights_format}") - - weights_entry["weights_format"] = weights_format - - return data - - @validates_schema - def validate_reference_tensor_names(self, data, **kwargs) -> None: - def get_tnames(tname: str): - return [t.get("name") if isinstance(t, dict) else t.name for t in data.get(tname, [])] - - valid_input_tensor_references = get_tnames("inputs") - ins = data.get("inputs", []) - outs = data.get("outputs", []) - if not isinstance(ins, list) or not isinstance(outs, list): - raise ValidationError( - "Failed to validate reference tensor names due to other validation errors in inputs/outputs." 
- ) - - for t in outs: - if not isinstance(t, raw_nodes.OutputTensor): - raise ValidationError("Failed to validate reference tensor names due to validation errors in outputs") - - if t.postprocessing is missing: - continue - - for postpr in t.postprocessing: - if postpr.kwargs is missing: - continue - - ref_tensor = postpr.kwargs.get("reference_tensor", missing) - if ref_tensor is not missing and ref_tensor not in valid_input_tensor_references: - raise ValidationError(f"{ref_tensor} not found in inputs") - - for t in ins: - if not isinstance(t, raw_nodes.InputTensor): - raise ValidationError("Failed to validate reference tensor names due to validation errors in inputs") - - if t.preprocessing is missing: - continue - - for prep in t.preprocessing: - if prep.kwargs is missing: - continue - - ref_tensor = prep.kwargs.get("reference_tensor", missing) - if ref_tensor is not missing and ref_tensor not in valid_input_tensor_references: - raise ValidationError(f"{ref_tensor} not found in inputs") - - if ref_tensor == t.name: - raise ValidationError(f"invalid self reference for preprocessing of tensor {t.name}") - - @validates_schema - def weights_entries_match_weights_formats(self, data, **kwargs) -> None: - weights: typing.Dict[str, WeightsEntry] = data.get("weights", {}) - for weights_format, weights_entry in weights.items(): - if not isinstance(weights_entry, get_args(raw_nodes.WeightsEntry)): - raise ValidationError("Cannot validate keys in weights field due to other validation errors.") - - if weights_format in ["pytorch_state_dict", "torchscript"]: - if weights_format == "pytorch_state_dict": - assert isinstance(weights_entry, raw_nodes.PytorchStateDictWeightsEntry) - elif weights_format == "torchscript": - assert isinstance(weights_entry, raw_nodes.TorchscriptWeightsEntry) - else: - raise NotImplementedError - - if weights_entry.dependencies is missing and weights_entry.pytorch_version is missing: - self.warn(f"weights:{weights_format}", "missing 'pytorch_version'") - - if weights_format in ["keras_hdf5", "tensorflow_js", "tensorflow_saved_model_bundle"]: - if weights_format == "keras_hdf5": - assert isinstance(weights_entry, raw_nodes.KerasHdf5WeightsEntry) - elif weights_format == "tensorflow_js": - assert isinstance(weights_entry, raw_nodes.TensorflowJsWeightsEntry) - elif weights_format == "tensorflow_saved_model_bundle": - assert isinstance(weights_entry, raw_nodes.TensorflowSavedModelBundleWeightsEntry) - else: - raise NotImplementedError - - if weights_entry.dependencies is missing and weights_entry.tensorflow_version is missing: - self.warn(f"weights:{weights_format}", "missing 'tensorflow_version'") - - if weights_format == "onnx": - assert isinstance(weights_entry, raw_nodes.OnnxWeightsEntry) - if weights_entry.dependencies is missing and weights_entry.opset_version is missing: - self.warn(f"weights:{weights_format}", "missing 'opset_version'") diff --git a/bioimageio/spec/model/v0_4/utils.py b/bioimageio/spec/model/v0_4/utils.py deleted file mode 100644 index 7f1649f0c..000000000 --- a/bioimageio/spec/model/v0_4/utils.py +++ /dev/null @@ -1 +0,0 @@ -from ..v0_3.utils import filter_resource_description diff --git a/bioimageio/spec/model/v0_5.py b/bioimageio/spec/model/v0_5.py new file mode 100644 index 000000000..cb9b82ca0 --- /dev/null +++ b/bioimageio/spec/model/v0_5.py @@ -0,0 +1,2434 @@ +from __future__ import annotations + +import collections.abc +import re +import warnings +from abc import ABC +from copy import deepcopy +from datetime import datetime +from itertools import 
chain +from pathlib import Path, PurePosixPath +from tempfile import mkdtemp +from typing import ( + TYPE_CHECKING, + Any, + ClassVar, + Dict, + FrozenSet, + Generic, + List, + Literal, + Mapping, + Optional, + Sequence, + Set, + Tuple, + Type, + TypeVar, + Union, + cast, +) + +import imageio +import numpy as np +from annotated_types import Ge, Gt, Interval, MaxLen, MinLen, Predicate +from imageio.v3 import imread # pyright: ignore[reportUnknownVariableType] +from numpy.typing import NDArray +from pydantic import Field, ValidationInfo, field_validator, model_validator +from typing_extensions import Annotated, LiteralString, Self, assert_never + +from bioimageio.spec._internal.validated_string import ValidatedString + +from .._internal.common_nodes import ( + Converter, + InvalidDescr, + Node, + NodeWithExplicitlySetFields, +) +from .._internal.constants import DTYPE_LIMITS +from .._internal.field_warning import issue_warning, warn +from .._internal.io import BioimageioYamlContent as BioimageioYamlContent +from .._internal.io import FileDescr as FileDescr +from .._internal.io import Sha256 as Sha256 +from .._internal.io import WithSuffix, download +from .._internal.io_basics import AbsoluteFilePath as AbsoluteFilePath +from .._internal.io_utils import load_array +from .._internal.types import Datetime as Datetime +from .._internal.types import DeprecatedLicenseId as DeprecatedLicenseId +from .._internal.types import Identifier as Identifier +from .._internal.types import ImportantFileSource, LowerCaseIdentifierAnno, SiUnit +from .._internal.types import LicenseId as LicenseId +from .._internal.types import ModelId as ModelId +from .._internal.types import NotEmpty as NotEmpty +from .._internal.types import ResourceId as ResourceId +from .._internal.url import HttpUrl as HttpUrl +from .._internal.validation_context import validation_context_var +from .._internal.version_type import Version as Version +from .._internal.warning_levels import INFO +from ..dataset.v0_3 import DatasetDescr as DatasetDescr +from ..dataset.v0_3 import LinkedDataset as LinkedDataset +from ..dataset.v0_3 import Uploader as Uploader +from ..generic.v0_3 import Author as Author +from ..generic.v0_3 import BadgeDescr as BadgeDescr +from ..generic.v0_3 import CiteEntry as CiteEntry +from ..generic.v0_3 import ( + DocumentationSource, + GenericModelDescrBase, + _author_conv, # pyright: ignore[reportPrivateUsage] + _maintainer_conv, # pyright: ignore[reportPrivateUsage] +) +from ..generic.v0_3 import Doi as Doi +from ..generic.v0_3 import LinkedResource as LinkedResource +from ..generic.v0_3 import Maintainer as Maintainer +from ..generic.v0_3 import OrcidId as OrcidId +from ..generic.v0_3 import RelativeFilePath as RelativeFilePath +from .v0_4 import Author as _Author_v0_4 +from .v0_4 import BinarizeDescr as _BinarizeDescr_v0_4 +from .v0_4 import BinarizeKwargs as BinarizeKwargs +from .v0_4 import CallableFromDepencency as CallableFromDepencency +from .v0_4 import CallableFromDepencency as _CallableFromDepencency_v0_4 +from .v0_4 import CallableFromFile as _CallableFromFile_v0_4 +from .v0_4 import ClipDescr as _ClipDescr_v0_4 +from .v0_4 import ClipKwargs as ClipKwargs +from .v0_4 import ImplicitOutputShape as _ImplicitOutputShape_v0_4 +from .v0_4 import InputTensorDescr as _InputTensorDescr_v0_4 +from .v0_4 import KnownRunMode as KnownRunMode +from .v0_4 import ModelDescr as _ModelDescr_v0_4 +from .v0_4 import OutputTensorDescr as _OutputTensorDescr_v0_4 +from .v0_4 import ParameterizedInputShape as 
_ParameterizedInputShape_v0_4 +from .v0_4 import PostprocessingDescr as _PostprocessingDescr_v0_4 +from .v0_4 import PreprocessingDescr as _PreprocessingDescr_v0_4 +from .v0_4 import ProcessingKwargs as ProcessingKwargs +from .v0_4 import RunMode as RunMode +from .v0_4 import ScaleLinearDescr as _ScaleLinearDescr_v0_4 +from .v0_4 import ScaleMeanVarianceDescr as _ScaleMeanVarianceDescr_v0_4 +from .v0_4 import ScaleRangeDescr as _ScaleRangeDescr_v0_4 +from .v0_4 import SigmoidDescr as _SigmoidDescr_v0_4 +from .v0_4 import TensorName as _TensorName_v0_4 +from .v0_4 import WeightsFormat as WeightsFormat +from .v0_4 import ZeroMeanUnitVarianceDescr as _ZeroMeanUnitVarianceDescr_v0_4 + +# unit names from https://ngff.openmicroscopy.org/latest/#axes-md +SpaceUnit = Literal[ + "attometer", + "angstrom", + "centimeter", + "decimeter", + "exameter", + "femtometer", + "foot", + "gigameter", + "hectometer", + "inch", + "kilometer", + "megameter", + "meter", + "micrometer", + "mile", + "millimeter", + "nanometer", + "parsec", + "petameter", + "picometer", + "terameter", + "yard", + "yoctometer", + "yottameter", + "zeptometer", + "zettameter", +] + +TimeUnit = Literal[ + "attosecond", + "centisecond", + "day", + "decisecond", + "exasecond", + "femtosecond", + "gigasecond", + "hectosecond", + "hour", + "kilosecond", + "megasecond", + "microsecond", + "millisecond", + "minute", + "nanosecond", + "petasecond", + "picosecond", + "second", + "terasecond", + "yoctosecond", + "yottasecond", + "zeptosecond", + "zettasecond", +] + +AxisType = Literal["batch", "channel", "index", "time", "space"] +TensorId = ValidatedString[Annotated[LowerCaseIdentifierAnno, MaxLen(32)]] +AxisId = ValidatedString[Annotated[LowerCaseIdentifierAnno, MaxLen(16)]] + + +NonBatchAxisId = Annotated[AxisId, Predicate(lambda x: x != "batch")] + +PostprocessingId = Literal[ + "binarize", + "clip", + "ensure_dtype", + "fixed_zero_mean_unit_variance", + "scale_linear", + "scale_mean_variance", + "scale_range", + "sigmoid", + "zero_mean_unit_variance", +] +PreprocessingId = Literal[ + "binarize", + "clip", + "ensure_dtype", + "scale_linear", + "sigmoid", + "zero_mean_unit_variance", + "scale_range", +] + + +SAME_AS_TYPE = "" + + +class ParameterizedSize(Node): + """Describes a range of valid tensor axis sizes as `size = min + n*step`. + `n` in this equation is the same for all axes parameterized in this manner across the whole model. + """ + + N: ClassVar[Type[int]] = int + """integer to parameterize all axes with a `ParameterizedSize`""" + + min: Annotated[int, Gt(0)] + step: Annotated[int, Gt(0)] + + def validate_size(self, size: int) -> int: + if size < self.min: + raise ValueError(f"size {size} < {self.min}") + if (size - self.min) % self.step != 0: + raise ValueError( + f"axis of size {size} is not parameterized by `min + n*step` =" + + f" `{self.min} + n*{self.step}`" + ) + + return size + + def get_size(self, n: ParameterizedSize.N) -> int: + return self.min + self.step * n
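A short usage sketch (not part of this patch) for `ParameterizedSize` as defined above; the numbers are illustrative:

size = ParameterizedSize(min=32, step=16)
assert size.get_size(n=2) == 64       # 32 + 2 * 16
assert size.validate_size(48) == 48   # valid: (48 - 32) % 16 == 0
# size.validate_size(40) would raise ValueError: 40 is not 32 + n*16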
+ + +class SizeReference(Node): + """A tensor axis size (extent in pixels/frames) defined in relation to a reference axis. + + `axis.size = reference.size * reference.scale / axis.scale + offset` + + note: + 1. The axis and the referenced axis need to have the same unit (or no unit). + 2. Batch axes may not be referenced. + 3. Fractions are rounded down. + + example: + An anisotropic input image of w*h=100*49 pixels depicts a physical space of 200*196 mm². + Let's assume that we want to express the image height h in relation to its width w + instead of only accepting input images of exactly 100*49 pixels + (for example to express a range of valid image shapes by parametrizing w, see `ParameterizedSize`). + + >>> w = SpaceInputAxis(id=AxisId("w"), size=100, unit="millimeter", scale=2) + >>> h = SpaceInputAxis( + ... id=AxisId("h"), + ... size=SizeReference(tensor_id=TensorId("input"), axis_id=AxisId("w"), offset=-1), + ... unit="millimeter", + ... scale=4, + ... ) + >>> print(h.size.get_size(h, w, n=0)) + 49 + + -> h = w * w.scale / h.scale + offset = 100 * 2mm / 4mm - 1 = 49 + """ + + tensor_id: TensorId + """tensor id of the reference axis""" + + axis_id: AxisId + """axis id of the reference axis""" + + offset: int = 0 + + def get_size( + self, + axis: Union[ + ChannelAxis, + IndexAxis, + TimeInputAxis, + SpaceInputAxis, + TimeOutputAxis, + SpaceOutputAxis, + ], + ref_axis: Union[ + ChannelAxis, + IndexAxis, + TimeInputAxis, + SpaceInputAxis, + TimeOutputAxis, + SpaceOutputAxis, + ], + n: ParameterizedSize.N, + ): + """helper method to compute concrete size for a given axis and its reference axis. + If the reference axis is parameterized, `n` is used to compute the concrete size of it, see `ParameterizedSize`. + """ + assert ( + axis.size == self + ), "Given `axis.size` is not defined by this `SizeReference`" + + assert ( + ref_axis.id == self.axis_id + ), f"Expected `ref_axis.id` to be {self.axis_id}, but got {ref_axis.id}." + + assert axis.unit == ref_axis.unit, ( + "`SizeReference` requires `axis` and `ref_axis` to have the same `unit`," + f" but {axis.unit}!={ref_axis.unit}" + ) + + if isinstance(ref_axis.size, (int, float)): + ref_size = ref_axis.size + elif isinstance(ref_axis.size, ParameterizedSize): + ref_size = ref_axis.size.get_size(n) + elif isinstance(ref_axis.size, SizeReference): + raise ValueError( + "Reference axis referenced in `SizeReference` may not be sized by a" + + " `SizeReference` itself." + ) + else: + assert_never(ref_axis.size) + + return int(ref_size * ref_axis.scale / axis.scale + self.offset) + + @staticmethod + def _get_unit( + axis: Union[ + ChannelAxis, + IndexAxis, + TimeInputAxis, + SpaceInputAxis, + TimeOutputAxis, + SpaceOutputAxis, + ], + ): + return axis.unit + + +# this Axis definition is compatible with the NGFF draft from July 10, 2023 +# https://ngff.openmicroscopy.org/latest/#axes-md +class AxisBase(NodeWithExplicitlySetFields): + fields_to_set_explicitly: ClassVar[FrozenSet[LiteralString]] = frozenset({"type"}) + + id: AxisId + """An axis id unique across all axes of one tensor.""" + + description: Annotated[str, MaxLen(128)] = "" + + __hash__ = NodeWithExplicitlySetFields.__hash__ + + +class WithHalo(Node): + halo: Annotated[int, Ge(0)] = 0 + """The halo should be cropped from the output tensor to avoid boundary effects. + It is to be cropped from both sides, i.e. `size_after_crop = size - 2 * halo`. + To document a halo that is already cropped by the model use `size.offset` instead."""
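The halo is not cropped by the model itself; consumer software crops it from both sides. A tiny sketch of the documented relation, with illustrative numbers:

size, halo = 128, 8
size_after_crop = size - 2 * halo   # as documented above
assert size_after_crop == 112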
+ + +class BatchAxis(AxisBase): + type: Literal["batch"] = "batch" + id: Annotated[AxisId, Predicate(lambda x: x == AxisId("batch"))] = AxisId("batch") + size: Optional[Literal[1]] = None + """The batch size may be fixed to 1, + otherwise (the default) it may be chosen arbitrarily depending on available memory.""" + + @property + def scale(self): + return 1.0 + + @property + def unit(self): + return None + + +class ChannelAxis(AxisBase): + type: Literal["channel"] = "channel" + id: NonBatchAxisId = AxisId("channel") + channel_names: List[Identifier] + + @property + def size(self) -> int: + return len(self.channel_names) + + @property + def scale(self) -> float: + return 1.0 + + @property + def unit(self): + return None + + +class IndexTimeSpaceAxisBase(AxisBase): + size: Annotated[ + Union[Annotated[int, Gt(0)], ParameterizedSize, SizeReference], + Field( + examples=[ + 10, + ParameterizedSize(min=32, step=16).model_dump(mode="json"), + SizeReference( + tensor_id=TensorId("t"), axis_id=AxisId("a"), offset=5 + ).model_dump(mode="json"), + ] + ), + ] + """The size/length of an axis can be specified as + - fixed integer + - parameterized series of valid sizes (`ParameterizedSize`) + - reference to another axis with an optional offset (`SizeReference`) + """ + + +class IndexAxis(IndexTimeSpaceAxisBase): + type: Literal["index"] = "index" + id: NonBatchAxisId = AxisId("index") + + @property + def scale(self) -> float: + return 1.0 + + @property + def unit(self): + return None + + +class TimeAxisBase(IndexTimeSpaceAxisBase): + type: Literal["time"] = "time" + id: NonBatchAxisId = AxisId("time") + unit: Optional[TimeUnit] = None + scale: Annotated[float, Gt(0)] = 1.0 + + +class TimeInputAxis(TimeAxisBase): + pass + + +class SpaceAxisBase(IndexTimeSpaceAxisBase): + type: Literal["space"] = "space" + id: Annotated[NonBatchAxisId, Field(examples=["x", "y", "z"])] = AxisId("x") + unit: Optional[SpaceUnit] = None + scale: Annotated[float, Gt(0)] = 1.0 + + +class SpaceInputAxis(SpaceAxisBase): + pass + + +_InputAxisUnion = Union[ + BatchAxis, ChannelAxis, IndexAxis, TimeInputAxis, SpaceInputAxis +] +InputAxis = Annotated[_InputAxisUnion, Field(discriminator="type")] + + +class TimeOutputAxis(TimeAxisBase, WithHalo): + pass + + +class SpaceOutputAxis(SpaceAxisBase, WithHalo): + pass + + +_OutputAxisUnion = Union[ + BatchAxis, ChannelAxis, IndexAxis, TimeOutputAxis, SpaceOutputAxis +] +OutputAxis = Annotated[_OutputAxisUnion, Field(discriminator="type")] + +AnyAxis = Union[InputAxis, OutputAxis] + +TVs = Union[ + NotEmpty[List[int]], + NotEmpty[List[float]], + NotEmpty[List[bool]], + NotEmpty[List[str]], +] + + +NominalOrOrdinalDType = Literal[ + "float32", + "float64", + "uint8", + "int8", + "uint16", + "int16", + "uint32", + "int32", + "uint64", + "int64", + "bool", +] + + +class NominalOrOrdinalDataDescr(Node): + values: TVs + """A fixed set of nominal or an ascending sequence of ordinal values. + In this case `data_type` is required to be an unsigned integer type, e.g. 'uint8'. + String `values` are interpreted as labels for tensor values 0, ..., N. + Note: as YAML 1.2 does not natively support a "set" datatype, + nominal values should be given as a sequence (aka list/array) as well. 
+ """ + + type: Annotated[ + NominalOrOrdinalDType, + Field( + examples=[ + "float32", + "uint8", + "uint16", + "int64", + "bool", + ], + ), + ] = "uint8" + + @model_validator(mode="after") + def _validate_values_match_type( + self, + ) -> Self: + incompatible: List[Any] = [] + for v in self.values: + if self.type == "bool": + if not isinstance(v, bool): + incompatible.append(v) + elif self.type in DTYPE_LIMITS: + if ( + isinstance(v, (int, float)) + and ( + v < DTYPE_LIMITS[self.type].min + or v > DTYPE_LIMITS[self.type].max + ) + or (isinstance(v, str) and "uint" not in self.type) + or (isinstance(v, float) and "int" in self.type) + ): + incompatible.append(v) + else: + incompatible.append(v) + + if len(incompatible) == 5: + incompatible.append("...") + break + + if incompatible: + raise ValueError( + f"data type '{self.type}' incompatible with values {incompatible}" + ) + + return self + + unit: Optional[Union[Literal["arbitrary unit"], SiUnit]] = None + + @property + def range(self): + if isinstance(self.values[0], str): + return 0, len(self.values) - 1 + else: + return min(self.values), max(self.values) + + +IntervalOrRatioDType = Literal[ + "float32", + "float64", + "uint8", + "int8", + "uint16", + "int16", + "uint32", + "int32", + "uint64", + "int64", +] + + +class IntervalOrRatioDataDescr(Node): + type: Annotated[ # todo: rename to dtype + IntervalOrRatioDType, + Field( + examples=["float32", "float64", "uint8", "uint16"], + ), + ] = "float32" + range: Tuple[Optional[float], Optional[float]] = ( + None, + None, + ) + """Tuple `(minimum, maximum)` specifying the allowed range of the data in this tensor. + `None` corresponds to min/max of what can be expressed by `data_type`.""" + unit: Union[Literal["arbitrary unit"], SiUnit] = "arbitrary unit" + scale: float = 1.0 + """Scale for data on an interval (or ratio) scale.""" + offset: Optional[float] = None + """Offset for data on a ratio scale.""" + + +TensorDataDescr = Union[NominalOrOrdinalDataDescr, IntervalOrRatioDataDescr] + + +class ProcessingDescrBase(NodeWithExplicitlySetFields, ABC): + """processing base class""" + + # id: Literal[PreprocessingId, PostprocessingId] # make abstract field + fields_to_set_explicitly: ClassVar[FrozenSet[LiteralString]] = frozenset({"id"}) + + +class BinarizeDescr(ProcessingDescrBase): + """Binarize the tensor with a fixed threshold. + Values above the threshold will be set to one, values below the threshold to zero. + """ + + id: Literal["binarize"] = "binarize" + kwargs: BinarizeKwargs + + +class ClipDescr(ProcessingDescrBase): + """Set tensor values below min to min and above max to max.""" + + id: Literal["clip"] = "clip" + kwargs: ClipKwargs + + +class EnsureDtypeKwargs(ProcessingKwargs): + dtype: str + + +class EnsureDtypeDescr(ProcessingDescrBase): + id: Literal["ensure_dtype"] = "ensure_dtype" + kwargs: EnsureDtypeKwargs + + +class ScaleLinearKwargs(ProcessingKwargs): + axis: Annotated[Optional[NonBatchAxisId], Field(examples=["channel"])] = ( + None # todo: validate existence of axis + ) + """The axis of non-scalar gains/offsets. + Invalid for scalar gains/offsets. 
+ """ + + gain: Union[float, NotEmpty[List[float]]] = 1.0 + """multiplicative factor""" + + offset: Union[float, NotEmpty[List[float]]] = 0.0 + """additive term""" + + @model_validator(mode="after") + def either_gain_or_offset(self) -> Self: + if ( + self.gain == 1.0 + or isinstance(self.gain, list) + and all(g == 1.0 for g in self.gain) + ) and ( + self.offset == 0.0 + or isinstance(self.offset, list) + and all(off == 0.0 for off in self.offset) + ): + raise ValueError( + "Redundant linear scaling not allowd. Set `gain` != 1.0 and/or `offset`" + + " != 0.0." + ) + + return self + + +class ScaleLinearDescr(ProcessingDescrBase): + """Fixed linear scaling.""" + + id: Literal["scale_linear"] = "scale_linear" + kwargs: ScaleLinearKwargs + + +class SigmoidDescr(ProcessingDescrBase): + """The logistic sigmoid funciton, a.k.a. expit function.""" + + id: Literal["sigmoid"] = "sigmoid" + + @property + def kwargs(self) -> ProcessingKwargs: + """empty kwargs""" + return ProcessingKwargs() + + +class FixedZeroMeanUnitVarianceKwargs(ProcessingKwargs): + """Normalize with fixed, precomputed values for mean and variance. + See `zero_mean_unit_variance` for data dependent normalization.""" + + mean: Annotated[ + Union[float, NotEmpty[Tuple[float, ...]]], + Field(examples=[3.14, (1.1, -2.2, 3.3)]), + ] + """The mean value(s) to normalize with. Specify `axis` for a sequence of `mean` values""" + + std: Annotated[ + Union[ + Annotated[float, Ge(1e-6)], NotEmpty[Tuple[Annotated[float, Ge(1e-6)], ...]] + ], + Field(examples=[1.05, (0.1, 0.2, 0.3)]), + ] + """The standard deviation value(s) to normalize with. Size must match `mean` values.""" + + axis: Annotated[Optional[NonBatchAxisId], Field(examples=["channel", "index"])] = ( + None # todo: validate existence of axis + ) + """The axis of the mean/std values to normalize each entry along that dimension separately. + Invalid for scalar gains/offsets. + """ + + @model_validator(mode="after") + def mean_and_std_match(self) -> Self: + mean_len = 1 if isinstance(self.mean, (float, int)) else len(self.mean) + std_len = 1 if isinstance(self.std, (float, int)) else len(self.std) + if mean_len != std_len: + raise ValueError( + "size of `mean` ({mean_len}) and `std` ({std_len}) must match." + ) + + return self + + +class FixedZeroMeanUnitVarianceDescr(ProcessingDescrBase): + """Subtract a given mean and divide by a given variance.""" + + id: Literal["fixed_zero_mean_unit_variance"] = "fixed_zero_mean_unit_variance" + kwargs: FixedZeroMeanUnitVarianceKwargs + + +class ZeroMeanUnitVarianceKwargs(ProcessingKwargs): + axes: Annotated[ + Optional[Sequence[AxisId]], Field(examples=[("batch", "x", "y")]) + ] = None + """The subset of axes to normalize jointly, i.e. axes to reduce to compute mean/std. + For example to normalize 'batch', 'x' and 'y' jointly in a tensor ('batch', 'channel', 'y', 'x') + resulting in a tensor of equal shape normalized per channel, specify `axes=('batch', 'x', 'y')`. + To normalize each sample independently leave out the 'batch' axis. 
+ eps: Annotated[float, Interval(gt=0, le=0.1)] = 1e-6 + """epsilon for numeric stability: `out = (tensor - mean) / (std + eps)`.""" + + +class ZeroMeanUnitVarianceDescr(ProcessingDescrBase): + """Subtract mean and divide by standard deviation.""" + + id: Literal["zero_mean_unit_variance"] = "zero_mean_unit_variance" + kwargs: ZeroMeanUnitVarianceKwargs + + +class ScaleRangeKwargs(ProcessingKwargs): + axes: Annotated[ + Optional[Sequence[AxisId]], Field(examples=[("batch", "x", "y")]) + ] = None + """The subset of axes to normalize jointly, i.e. axes to reduce to compute the min/max percentile value. + For example to normalize 'batch', 'x' and 'y' jointly in a tensor ('batch', 'channel', 'y', 'x') + resulting in a tensor of equal shape normalized per channel, specify `axes=('batch', 'x', 'y')`. + To normalize samples independently, leave out the "batch" axis. + Default: Scale all axes jointly.""" + + min_percentile: Annotated[float, Interval(ge=0, lt=100)] = 0.0 + """The lower percentile used for normalization.""" + + max_percentile: Annotated[float, Interval(gt=1, le=100)] = 100.0 + """The upper percentile used for normalization. + Has to be bigger than `min_percentile`. + The range is 1 to 100 instead of 0 to 100 to avoid mistakenly + accepting percentiles specified in the range 0.0 to 1.0.""" + + eps: Annotated[float, Interval(gt=0, le=0.1)] = 1e-6 + """Epsilon for numeric stability. + `out = (tensor - v_lower) / (v_upper - v_lower + eps)`; + with `v_lower,v_upper` values at the respective percentiles.""" + + reference_tensor: Optional[TensorId] = None + """Tensor ID to compute the percentiles from. Default: The tensor itself. + For any tensor in `inputs` only input tensor references are allowed.""" + + @field_validator("max_percentile", mode="after") + @classmethod + def min_smaller_max(cls, value: float, info: ValidationInfo) -> float: + if (min_p := info.data["min_percentile"]) >= value: + raise ValueError(f"min_percentile {min_p} >= max_percentile {value}") + + return value + + +class ScaleRangeDescr(ProcessingDescrBase): + """Scale with percentiles.""" + + id: Literal["scale_range"] = "scale_range" + kwargs: ScaleRangeKwargs + + +class ScaleMeanVarianceKwargs(ProcessingKwargs): + """Scale a tensor's data distribution to match another tensor's mean/std. + `out = (tensor - mean) / (std + eps) * (ref_std + eps) + ref_mean.`""" + + reference_tensor: TensorId + """Name of tensor to match.""" + + axes: Annotated[ + Optional[Sequence[AxisId]], Field(examples=[("batch", "x", "y")]) + ] = None + """The subset of axes to normalize jointly, i.e. axes to reduce to compute mean/std. + For example to normalize 'batch', 'x' and 'y' jointly in a tensor ('batch', 'channel', 'y', 'x') + resulting in a tensor of equal shape normalized per channel, specify `axes=('batch', 'x', 'y')`. + To normalize samples independently, leave out the 'batch' axis. + Default: Scale all axes jointly.""" + + eps: Annotated[float, Interval(gt=0, le=0.1)] = 1e-6 + """Epsilon for numeric stability: + `out = (tensor - mean) / (std + eps) * (ref_std + eps) + ref_mean.`""" + + +class ScaleMeanVarianceDescr(ProcessingDescrBase): + """Scale the tensor such that 
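+ its mean and variance match a reference tensor.""" + + # Hypothetical usage sketch (tensor id "raw" is a placeholder): + # ScaleMeanVarianceKwargs(reference_tensor=TensorId("raw")) + # matches a tensor's distribution to that of tensor "raw", reducing over all axes (the default). +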
+ id: Literal["scale_mean_variance"] = "scale_mean_variance" + kwargs: ScaleMeanVarianceKwargs + + +PreprocessingDescr = Annotated[ + Union[ + BinarizeDescr, + ClipDescr, + EnsureDtypeDescr, + ScaleLinearDescr, + SigmoidDescr, + FixedZeroMeanUnitVarianceDescr, + ZeroMeanUnitVarianceDescr, + ScaleRangeDescr, + ], + Field(discriminator="id"), +] +PostprocessingDescr = Annotated[ + Union[ + BinarizeDescr, + ClipDescr, + EnsureDtypeDescr, + ScaleLinearDescr, + SigmoidDescr, + FixedZeroMeanUnitVarianceDescr, + ZeroMeanUnitVarianceDescr, + ScaleRangeDescr, + ScaleMeanVarianceDescr, + ], + Field(discriminator="id"), +] + +IO_AxisT = TypeVar("IO_AxisT", InputAxis, OutputAxis) + + +class TensorDescrBase(Node, Generic[IO_AxisT]): + id: TensorId + """Tensor id. No duplicates are allowed.""" + + description: Annotated[str, MaxLen(128)] = "" + """free text description""" + + axes: NotEmpty[Sequence[IO_AxisT]] + """tensor axes""" + + @property + def shape(self): + return tuple(a.size for a in self.axes) + + @field_validator("axes", mode="after", check_fields=False) + @classmethod + def _validate_axes(cls, axes: Sequence[AnyAxis]) -> Sequence[AnyAxis]: + seen_types: Set[str] = set() + duplicate_axes_types: Set[str] = set() + for a in axes: + if a.type in ("time", "space"): + continue # duplicates allowed + + (duplicate_axes_types if a.type in seen_types else seen_types).add(a.type) + + if duplicate_axes_types: + raise ValueError(f"Duplicate axis types: {duplicate_axes_types}") + + seen_ids: Set[AxisId] = set() + duplicate_axes_ids: Set[AxisId] = set() + for a in axes: + (duplicate_axes_ids if a.id in seen_ids else seen_ids).add(a.id) + + if duplicate_axes_ids: + raise ValueError(f"Duplicate axis ids: {duplicate_axes_ids}") + + return axes + + test_tensor: FileDescr + """An example tensor to use for testing. + Using the model with the test input tensors is expected to yield the test output tensors. + Each test tensor has to be an ndarray in the + [numpy.lib file format](https://numpy.org/doc/stable/reference/generated/numpy.lib.format.html#module-numpy.lib.format). + The file extension must be '.npy'.""" + + sample_tensor: Optional[FileDescr] = None + """A sample tensor to illustrate a possible input/output for the model. + The sample image primarily serves to inform a human user about an example use case + and is typically stored as .hdf5, .png or .tiff. + It has to be readable by the [imageio library](https://imageio.readthedocs.io/en/stable/formats/index.html#supported-formats) + (numpy's `.npy` format is not supported). + The image dimensionality has to match the number of axes specified in this tensor description. 
+ """ + + @model_validator(mode="after") + def _validate_sample_tensor(self) -> Self: + if ( + self.sample_tensor is None + or not validation_context_var.get().perform_io_checks + ): + return self + + down = download(self.sample_tensor.source, sha256=self.sample_tensor.sha256) + + local_source = down.path + tensor: NDArray[Any] = imread( + local_source, extension=PurePosixPath(down.original_file_name).suffix + ) + n_dims = len(tensor.squeeze().shape) + n_dims_min = n_dims_max = len(self.axes) + + for a in self.axes: + if isinstance(a, BatchAxis): + n_dims_min -= 1 + elif isinstance(a.size, int): + if a.size == 1: + n_dims_min -= 1 + elif isinstance(a.size, ParameterizedSize): + if a.size.min == 1: + n_dims_min -= 1 + elif isinstance(a.size, SizeReference): + if a.size.offset < 2: + # size reference may result in singleton axis + n_dims_min -= 1 + else: + assert_never(a.size) + + n_dims_min = max(0, n_dims_min) + if n_dims < n_dims_min or n_dims > n_dims_max: + raise ValueError( + f"Expected sample tensor to have {n_dims_min} to" + + f" {n_dims_max} dimensions, but found {n_dims} (shape: {tensor.shape})." + ) + + return self + + data: Union[TensorDataDescr, NotEmpty[Sequence[TensorDataDescr]]] = ( + IntervalOrRatioDataDescr() + ) + """Description of the tensor's data values, optionally per channel. + If specified per channel, the data `type` needs to match across channels.""" + + @property + def dtype( + self, + ) -> Literal[ + "float32", + "float64", + "uint8", + "int8", + "uint16", + "int16", + "uint32", + "int32", + "uint64", + "int64", + "bool", + ]: + """dtype as specified under `data.type` or `data[i].type`""" + if isinstance(self.data, collections.abc.Sequence): + return self.data[0].type + else: + return self.data.type + + @field_validator("data", mode="after") + @classmethod + def _check_data_type_across_channels( + cls, value: Union[TensorDataDescr, NotEmpty[Sequence[TensorDataDescr]]] + ) -> Union[TensorDataDescr, NotEmpty[Sequence[TensorDataDescr]]]: + if not isinstance(value, list): + return value + + dtypes = {t.type for t in value} + if len(dtypes) > 1: + raise ValueError( + "Tensor data descriptions per channel need to agree in their data" + + f" `type`, but found {dtypes}." + ) + + return value + + @model_validator(mode="after") + def _check_data_matches_channelaxis(self) -> Self: + if not isinstance(self.data, (list, tuple)): + return self + + for a in self.axes: + if isinstance(a, ChannelAxis): + size = a.size + assert isinstance(size, int) + break + else: + return self + + if len(self.data) != size: + raise ValueError( + f"Got tensor data descriptions for {len(self.data)} channels, but" + + f" '{a.id}' axis has size {size}." + ) + + return self + + def get_axis_sizes_for_array(self, array: NDArray[Any]) -> Dict[AxisId, int]: + if len(array.shape) != len(self.axes): + raise ValueError( + f"Dimension mismatch: array shape {array.shape} (#{len(array.shape)})" + + f" incompatible with {len(self.axes)} axes." + ) + return {a.id: array.shape[i] for i, a in enumerate(self.axes)} + + +class InputTensorDescr(TensorDescrBase[InputAxis]): + id: TensorId = TensorId("input") + """Input tensor id. 
+ preprocessing: List[PreprocessingDescr] = Field(default_factory=list) + """Description of how this input should be preprocessed.""" + + @model_validator(mode="after") + def _validate_preprocessing_kwargs(self) -> Self: + axes_ids = [a.id for a in self.axes] + for p in self.preprocessing: + kwargs_axes: Union[Any, Sequence[Any]] = p.kwargs.get("axes", ()) + if not isinstance(kwargs_axes, collections.abc.Sequence): + raise ValueError( + f"Expected `axes` to be a sequence, but got {type(kwargs_axes)}" + ) + + if any(a not in axes_ids for a in kwargs_axes): + raise ValueError("`kwargs.axes` needs to be subset of axes ids") + + return self + + +def convert_axes( + axes: str, + *, + shape: Union[ + Sequence[int], _ParameterizedInputShape_v0_4, _ImplicitOutputShape_v0_4 + ], + tensor_type: Literal["input", "output"], + halo: Optional[Sequence[int]], + size_refs: Mapping[_TensorName_v0_4, Mapping[str, int]], +): + ret: List[AnyAxis] = [] + for i, a in enumerate(axes): + axis_type = _AXIS_TYPE_MAP.get(a, a) + if axis_type == "batch": + ret.append(BatchAxis()) + continue + + scale = 1.0 + if isinstance(shape, _ParameterizedInputShape_v0_4): + if shape.step[i] == 0: + size = shape.min[i] + else: + size = ParameterizedSize(min=shape.min[i], step=shape.step[i]) + elif isinstance(shape, _ImplicitOutputShape_v0_4): + ref_t = str(shape.reference_tensor) + if ref_t.count(".") == 1: + t_id, orig_a_id = ref_t.split(".") + else: + t_id = ref_t + orig_a_id = a + + a_id = _AXIS_ID_MAP.get(orig_a_id, a) + if not (orig_scale := shape.scale[i]): + # old way to insert a new axis dimension + size = int(2 * shape.offset[i]) + else: + scale = 1 / orig_scale + if axis_type in ("channel", "index"): + # these axes no longer have a scale + offset_from_scale = orig_scale * size_refs.get( + _TensorName_v0_4(t_id), {} + ).get(orig_a_id, 0) + else: + offset_from_scale = 0 + size = SizeReference( + tensor_id=TensorId(t_id), + axis_id=AxisId(a_id), + offset=int(offset_from_scale + 2 * shape.offset[i]), + ) + elif isinstance(shape, collections.abc.Sequence): + size: Any = shape[i] + assert isinstance(size, int) + else: + assert_never(shape) + + if axis_type == "time": + if tensor_type == "input": + ret.append(TimeInputAxis(size=size, scale=scale)) + else: + ret.append( + TimeOutputAxis( + size=size, scale=scale, halo=0 if halo is None else halo[i] + ) + ) + elif axis_type == "index": + ret.append(IndexAxis(size=size)) + elif axis_type == "channel": + assert not isinstance(size, ParameterizedSize) + if isinstance(size, SizeReference): + warnings.warn( + "Conversion of channel size from an implicit output shape may be" + + " wrong" + ) + ret.append( + ChannelAxis( + channel_names=[ + Identifier(f"channel{i}") for i in range(size.offset) + ] + ) + ) + else: + ret.append( + ChannelAxis( + channel_names=[Identifier(f"channel{i}") for i in range(size)] + ) + ) + elif axis_type == "space": + if tensor_type == "input": + ret.append(SpaceInputAxis(id=AxisId(a), size=size, scale=scale)) + else: + ret.append(SpaceOutputAxis(id=AxisId(a), size=size, scale=scale)) + + return ret + + +_AXIS_TYPE_MAP = { + "b": "batch", + "t": "time", + "i": "index", + "c": "channel", + "x": "space", + "y": "space", + "z": "space", +} + +_AXIS_ID_MAP = { + "b": "batch", + "t": "time", + "i": "index", + "c": "channel", +} + + +def _axes_letters_to_ids( + axes: Optional[str], +) -> Optional[List[AxisId]]: + if axes is None: + return None + return [AxisId(_AXIS_ID_MAP.get(a, a)) for a in map(str, 
axes)] + + +def _get_complement_v04_axis( + tensor_axes: Sequence[str], axes: Optional[Sequence[str]] +) -> Optional[AxisId]: + if axes is None: + return None + + axes_str = str(axes) + all_axes = set(str(tensor_axes)) | {"b"} + complement_axes = [a for a in axes_str if a not in all_axes] + if len(complement_axes) > 1: + raise ValueError( + f"Expected none or a single complement axis, but axes '{axes}' " + + f"for tensor dims '{all_axes}' leave '{complement_axes}'." + ) + + return None if not complement_axes else AxisId(complement_axes[0]) + + +def _convert_proc( + p: Union[_PreprocessingDescr_v0_4, _PostprocessingDescr_v0_4], + tensor_axes: Sequence[str], +) -> Union[PreprocessingDescr, PostprocessingDescr]: + if isinstance(p, _BinarizeDescr_v0_4): + return BinarizeDescr(kwargs=BinarizeKwargs(threshold=p.kwargs.threshold)) + elif isinstance(p, _ClipDescr_v0_4): + return ClipDescr(kwargs=ClipKwargs(min=p.kwargs.min, max=p.kwargs.max)) + elif isinstance(p, _SigmoidDescr_v0_4): + return SigmoidDescr() + elif isinstance(p, _ScaleLinearDescr_v0_4): + axes = _axes_letters_to_ids(p.kwargs.axes) + if p.kwargs.axes is None: + axis = None + else: + axis = _get_complement_v04_axis(tensor_axes, p.kwargs.axes) + + return ScaleLinearDescr( + kwargs=ScaleLinearKwargs( + axis=axis, gain=p.kwargs.gain, offset=p.kwargs.offset + ) + ) + elif isinstance(p, _ScaleMeanVarianceDescr_v0_4): + return ScaleMeanVarianceDescr( + kwargs=ScaleMeanVarianceKwargs( + axes=_axes_letters_to_ids(p.kwargs.axes), + reference_tensor=TensorId(str(p.kwargs.reference_tensor)), + eps=p.kwargs.eps, + ) + ) + elif isinstance(p, _ZeroMeanUnitVarianceDescr_v0_4): + if p.kwargs.mode == "fixed": + mean = p.kwargs.mean + assert mean is not None + if isinstance(mean, list): + mean = tuple(mean) + + std = p.kwargs.std + assert std is not None + if isinstance(std, list): + std = tuple(std) + + return FixedZeroMeanUnitVarianceDescr( + kwargs=FixedZeroMeanUnitVarianceKwargs(mean=mean, std=std) + ) + else: + axes = _axes_letters_to_ids(p.kwargs.axes) or [] + if p.kwargs.mode == "per_dataset": + axes = [AxisId("batch")] + axes + if not axes: + axes = None + return ZeroMeanUnitVarianceDescr( + kwargs=ZeroMeanUnitVarianceKwargs(axes=axes, eps=p.kwargs.eps) + ) + + elif isinstance(p, _ScaleRangeDescr_v0_4): + return ScaleRangeDescr( + kwargs=ScaleRangeKwargs( + axes=_axes_letters_to_ids(p.kwargs.axes), + min_percentile=p.kwargs.min_percentile, + max_percentile=p.kwargs.max_percentile, + eps=p.kwargs.eps, + ) + ) + else: + assert_never(p) + + +class _InputTensorConv( + Converter[ + _InputTensorDescr_v0_4, + InputTensorDescr, + ImportantFileSource, + Optional[ImportantFileSource], + Mapping[_TensorName_v0_4, Mapping[str, int]], + ] +): + def _convert( + self, + src: _InputTensorDescr_v0_4, + tgt: "type[InputTensorDescr] | type[dict[str, Any]]", + test_tensor: ImportantFileSource, + sample_tensor: Optional[ImportantFileSource], + size_refs: Mapping[_TensorName_v0_4, Mapping[str, int]], + ) -> "InputTensorDescr | dict[str, Any]": + axes: List[InputAxis] = convert_axes( # pyright: ignore[reportAssignmentType] + src.axes, + shape=src.shape, + tensor_type="input", + halo=None, + size_refs=size_refs, + ) + prep: List[PreprocessingDescr] = [] + for p in src.preprocessing: + cp = _convert_proc(p, src.axes) + assert not isinstance(cp, ScaleMeanVarianceDescr) + prep.append(cp) + + return tgt( + axes=axes, + id=TensorId(str(src.name)), + test_tensor=FileDescr(source=test_tensor), + sample_tensor=( + None if sample_tensor is None else 
FileDescr(source=sample_tensor) + ), + data=dict(type=src.data_type), # pyright: ignore[reportArgumentType] + preprocessing=prep, + ) + + +_input_tensor_conv = _InputTensorConv(_InputTensorDescr_v0_4, InputTensorDescr) + + +class OutputTensorDescr(TensorDescrBase[OutputAxis]): + id: TensorId = TensorId("output") + """Output tensor id. + No duplicates are allowed across all inputs and outputs.""" + + postprocessing: List[PostprocessingDescr] = Field(default_factory=list) + """Description of how this output should be postprocessed.""" + + @model_validator(mode="after") + def _validate_postprocessing_kwargs(self) -> Self: + axes_ids = [a.id for a in self.axes] + for p in self.postprocessing: + kwargs_axes: Union[Any, Sequence[Any]] = p.kwargs.get("axes", ()) + if not isinstance(kwargs_axes, collections.abc.Sequence): + raise ValueError( + f"expected `axes` sequence, but got {type(kwargs_axes)}" + ) + + if any(a not in axes_ids for a in kwargs_axes): + raise ValueError("`kwargs.axes` needs to be subset of axes ids") + + return self + + +class _OutputTensorConv( + Converter[ + _OutputTensorDescr_v0_4, + OutputTensorDescr, + ImportantFileSource, + Optional[ImportantFileSource], + Mapping[_TensorName_v0_4, Mapping[str, int]], + ] +): + def _convert( + self, + src: _OutputTensorDescr_v0_4, + tgt: "type[OutputTensorDescr] | type[dict[str, Any]]", + test_tensor: ImportantFileSource, + sample_tensor: Optional[ImportantFileSource], + size_refs: Mapping[_TensorName_v0_4, Mapping[str, int]], + ) -> "OutputTensorDescr | dict[str, Any]": + # TODO: split convert_axes into convert_output_axes and convert_input_axes + axes: List[OutputAxis] = convert_axes( # pyright: ignore[reportAssignmentType] + src.axes, + shape=src.shape, + tensor_type="output", + halo=src.halo, + size_refs=size_refs, + ) + data_descr: Dict[str, Any] = dict(type=src.data_type) + if data_descr["type"] == "bool": + data_descr["values"] = [False, True] + + return tgt( + axes=axes, + id=TensorId(str(src.name)), + test_tensor=FileDescr(source=test_tensor), + sample_tensor=( + None if sample_tensor is None else FileDescr(source=sample_tensor) + ), + data=data_descr, # pyright: ignore[reportArgumentType] + postprocessing=[_convert_proc(p, src.axes) for p in src.postprocessing], + ) + + +_output_tensor_conv = _OutputTensorConv(_OutputTensorDescr_v0_4, OutputTensorDescr) + + +TensorDescr = Union[InputTensorDescr, OutputTensorDescr] + + +def validate_tensors( + tensors: Mapping[TensorId, Tuple[TensorDescr, NDArray[Any]]], + tensor_origin: str, # for more precise error messages, e.g. 
'test_tensor' +): + all_tensor_axes: Dict[TensorId, Dict[AxisId, Tuple[AnyAxis, int]]] = {} + + def e_msg(d: TensorDescr): + return f"{'inputs' if isinstance(d, InputTensorDescr) else 'outputs'}[{d.id}]" + + for descr, array in tensors.values(): + try: + axis_sizes = descr.get_axis_sizes_for_array(array) + except ValueError as e: + raise ValueError(f"{e_msg(descr)} {e}") + else: + all_tensor_axes[descr.id] = { + a.id: (a, axis_sizes[a.id]) for a in descr.axes + } + + for descr, array in tensors.values(): + if array.dtype.name != descr.dtype: + raise ValueError( + f"{e_msg(descr)}.{tensor_origin}.dtype '{array.dtype.name}' does not" + + f" match described dtype '{descr.dtype}'" + ) + + for a in descr.axes: + actual_size = all_tensor_axes[descr.id][a.id][1] + if a.size is None: + continue + + if isinstance(a.size, int): + if actual_size != a.size: + raise ValueError( + f"{e_msg(descr)}.{tensor_origin}: axis '{a.id}' " + + f"has incompatible size {actual_size}, expected {a.size}" + ) + elif isinstance(a.size, ParameterizedSize): + _ = a.size.validate_size(actual_size) + elif isinstance(a.size, SizeReference): + ref_tensor_axes = all_tensor_axes.get(a.size.tensor_id) + if ref_tensor_axes is None: + raise ValueError( + f"{e_msg(descr)}.axes[{a.id}].size.tensor_id: Unknown tensor" + + f" reference '{a.size.tensor_id}'" + ) + + ref_axis, ref_size = ref_tensor_axes.get(a.size.axis_id, (None, None)) + if ref_axis is None or ref_size is None: + raise ValueError( + f"{e_msg(descr)}.axes[{a.id}].size.axis_id: Unknown tensor axis" + + f" reference '{a.size.tensor_id}.{a.size.axis_id}'" + ) + + if a.unit != ref_axis.unit: + raise ValueError( + f"{e_msg(descr)}.axes[{a.id}].size: `SizeReference` requires" + + " axis and reference axis to have the same `unit`, but" + + f" {a.unit}!={ref_axis.unit}" + ) + + if actual_size != ( + expected_size := ( + ref_size * ref_axis.scale / a.scale + a.size.offset + ) + ): + raise ValueError( + f"{e_msg(descr)}.{tensor_origin}: axis '{a.id}' of size" + + f" {actual_size} invalid for referenced size {ref_size};" + + f" expected {expected_size}" + ) + else: + assert_never(a.size) + + +class EnvironmentFileDescr(FileDescr): + source: Annotated[ + ImportantFileSource, + WithSuffix((".yaml", ".yml"), case_sensitive=True), + Field( + examples=["environment.yaml"], + ), + ] + """∈📦 Conda environment file. + Allows specifying custom dependencies; see the conda docs: + - [Exporting an environment file across platforms](https://conda.io/projects/conda/en/latest/user-guide/tasks/manage-environments.html#exporting-an-environment-file-across-platforms) + - [Creating an environment file manually](https://conda.io/projects/conda/en/latest/user-guide/tasks/manage-environments.html#creating-an-environment-file-manually) + """ + + +class _ArchitectureCallableDescr(Node): + callable: Annotated[Identifier, Field(examples=["MyNetworkClass", "get_my_model"])] + """Identifier of the callable that returns a torch.nn.Module instance.""" + + kwargs: Dict[str, Any] = Field(default_factory=dict) + """keyword arguments for the `callable`""" + + +class ArchitectureFromFileDescr(_ArchitectureCallableDescr, FileDescr): + pass + + +class ArchitectureFromLibraryDescr(_ArchitectureCallableDescr): + import_from: str + """Where to import the callable from, i.e. 
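+ `from <import_from> import <callable>`""" + + +ArchitectureDescr = Union[ArchitectureFromFileDescr, ArchitectureFromLibraryDescr] + +# Hypothetical example (module and class names are made up): a network class +# importable as `from mypackage.nets import UNet2d` could be described as +# ArchitectureFromLibraryDescr(import_from="mypackage.nets", callable=Identifier("UNet2d")). + +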
+class _ArchFileConv( + Converter[ + _CallableFromFile_v0_4, + ArchitectureFromFileDescr, + Optional[Sha256], + Dict[str, Any], + ] +): + def _convert( + self, + src: _CallableFromFile_v0_4, + tgt: "type[ArchitectureFromFileDescr | dict[str, Any]]", + sha256: Optional[Sha256], + kwargs: Dict[str, Any], + ) -> "ArchitectureFromFileDescr | dict[str, Any]": + if src.startswith("http") and src.count(":") == 2: + http, source, callable_ = src.split(":") + source = ":".join((http, source)) + elif not src.startswith("http") and src.count(":") == 1: + source, callable_ = src.split(":") + else: + source = str(src) + callable_ = str(src) + return tgt( + callable=Identifier(callable_), + source=cast(ImportantFileSource, source), + sha256=sha256, + kwargs=kwargs, + ) + + +_arch_file_conv = _ArchFileConv(_CallableFromFile_v0_4, ArchitectureFromFileDescr) + + +class _ArchLibConv( + Converter[ + _CallableFromDepencency_v0_4, ArchitectureFromLibraryDescr, Dict[str, Any] + ] +): + def _convert( + self, + src: _CallableFromDepencency_v0_4, + tgt: "type[ArchitectureFromLibraryDescr | dict[str, Any]]", + kwargs: Dict[str, Any], + ) -> "ArchitectureFromLibraryDescr | dict[str, Any]": + *mods, callable_ = src.split(".") + import_from = ".".join(mods) + return tgt( + import_from=import_from, callable=Identifier(callable_), kwargs=kwargs + ) + + +_arch_lib_conv = _ArchLibConv( + _CallableFromDepencency_v0_4, ArchitectureFromLibraryDescr +) + + +class WeightsEntryDescrBase(FileDescr): + type: ClassVar[WeightsFormat] + weights_format_name: ClassVar[str] # human readable + + source: ImportantFileSource + """∈📦 The weights file.""" + + authors: Optional[List[Author]] = None + """Authors + Either the person(s) that have trained this model resulting in the original weights file. + (If this is the initial weights entry, i.e. it does not have a `parent`) + Or the person(s) who have converted the weights to this weights format. + (If this is a child weight, i.e. it has a `parent` field) + """ + + parent: Annotated[ + Optional[WeightsFormat], Field(examples=["pytorch_state_dict"]) + ] = None + """The source weights these weights were converted from. + For example, if a model's weights were converted from the `pytorch_state_dict` format to `torchscript`, + the `pytorch_state_dict` weights entry has no `parent` and is the parent of the `torchscript` weights. + All weight entries except one (the initial set of weights resulting from training the model) + need to have this field.""" + + @model_validator(mode="after") + def check_parent_is_not_self(self) -> Self: + if self.type == self.parent: + raise ValueError("Weights entry can't be its own parent.") + + return self + + +class KerasHdf5WeightsDescr(WeightsEntryDescrBase): + type = "keras_hdf5" + weights_format_name: ClassVar[str] = "Keras HDF5" + tensorflow_version: Version + """TensorFlow version used to create these weights.""" + + +class OnnxWeightsDescr(WeightsEntryDescrBase): + type = "onnx" + weights_format_name: ClassVar[str] = "ONNX" + opset_version: Annotated[int, Ge(7)] + """ONNX opset version""" + + +class PytorchStateDictWeightsDescr(WeightsEntryDescrBase): + type = "pytorch_state_dict" + weights_format_name: ClassVar[str] = "Pytorch State Dict" + architecture: ArchitectureDescr + pytorch_version: Version + """Version of the PyTorch library used. 
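+ If `dependencies` is specified it has to include pytorch and any version pinning has to be compatible. + """ + + # Illustrative note (not normative): a torchscript entry converted from the original + # pytorch_state_dict weights would set parent="pytorch_state_dict", while the + # original entry leaves `parent` unset. +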
+ dependencies: Optional[EnvironmentFileDescr] = None + """Custom dependencies beyond pytorch. + The conda environment file should include pytorch and any version pinning has to be compatible with + `pytorch_version`. + """ + + +class TensorflowJsWeightsDescr(WeightsEntryDescrBase): + type = "tensorflow_js" + weights_format_name: ClassVar[str] = "Tensorflow.js" + tensorflow_version: Version + """Version of the TensorFlow library used.""" + + source: ImportantFileSource + """∈📦 The multi-file weights. + All required files/folders should be bundled in a zip archive.""" + + +class TensorflowSavedModelBundleWeightsDescr(WeightsEntryDescrBase): + type = "tensorflow_saved_model_bundle" + weights_format_name: ClassVar[str] = "Tensorflow Saved Model" + tensorflow_version: Version + """Version of the TensorFlow library used.""" + + dependencies: Optional[EnvironmentFileDescr] = None + """Custom dependencies beyond tensorflow. + Should include tensorflow and any version pinning has to be compatible with `tensorflow_version`.""" + + source: ImportantFileSource + """∈📦 The multi-file weights. + All required files/folders should be bundled in a zip archive.""" + + +class TorchscriptWeightsDescr(WeightsEntryDescrBase): + type = "torchscript" + weights_format_name: ClassVar[str] = "TorchScript" + pytorch_version: Version + """Version of the PyTorch library used.""" + + +class WeightsDescr(Node): + keras_hdf5: Optional[KerasHdf5WeightsDescr] = None + onnx: Optional[OnnxWeightsDescr] = None + pytorch_state_dict: Optional[PytorchStateDictWeightsDescr] = None + tensorflow_js: Optional[TensorflowJsWeightsDescr] = None + tensorflow_saved_model_bundle: Optional[TensorflowSavedModelBundleWeightsDescr] = ( + None + ) + torchscript: Optional[TorchscriptWeightsDescr] = None + + @model_validator(mode="after") + def check_entries(self) -> Self: + entries = {wtype for wtype, entry in self if entry is not None} + + if not entries: + raise ValueError("Missing weights entry") + + entries_wo_parent = { + wtype + for wtype, entry in self + if entry is not None and hasattr(entry, "parent") and entry.parent is None + } + if len(entries_wo_parent) != 1: + issue_warning( + "Exactly one weights entry may not specify the `parent` field (got" + + " {value}). That entry is considered the original set of model weights." + + " Other weight formats are created through conversion of the original or" + + " already converted weights. They have to reference the weights format" + + " they were converted from as their `parent`.", + value=len(entries_wo_parent), + ) + + for wtype, entry in self: + if entry is None: + continue + + assert hasattr(entry, "type") + assert hasattr(entry, "parent") + assert wtype == entry.type + if ( + entry.parent is not None and entry.parent not in entries + ): # self reference checked for `parent` field + raise ValueError( + f"`weights.{wtype}.parent={entry.parent}` not in specified weight" + + f" formats: {entries}" + ) + + return self + + +class LinkedModel(Node): + """Reference to a bioimage.io model.""" + + id: ModelId + """A valid model `id` from the bioimage.io collection.""" + + version_number: int + """version number (n-th published version, not the semantic version) of linked model""" + + +class ModelDescr(GenericModelDescrBase, title="bioimage.io model specification"): + """Specification of the fields used in a bioimage.io-compliant RDF to describe AI models with pretrained weights. 
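+ These fields are typically stored in a YAML file which we call a model resource description file (model RDF). + """ + + # Minimal illustrative RDF sketch (all values are placeholders, not a complete valid example): + #   format_version: 0.5.0 + #   type: model + #   name: my-model + #   authors: [{name: Jane Doe}] + #   documentation: README.md + #   inputs: [...] + #   outputs: [...] + #   weights: {pytorch_state_dict: {...}} +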
+ format_version: Literal["0.5.0"] = "0.5.0" + """Version of the bioimage.io model description specification used. + When creating a new model, always use the latest micro/patch version described here. + The `format_version` is important for any consumer software to understand how to parse the fields. + """ + + type: Literal["model"] = "model" + """Specialized resource type 'model'""" + + id: Optional[ModelId] = None + """Model zoo (bioimage.io) wide, unique identifier (assigned by bioimage.io)""" + + authors: NotEmpty[List[Author]] + """The authors are the creators of the model RDF and the primary points of contact.""" + + documentation: Annotated[ + DocumentationSource, + Field( + examples=[ + "https://raw.githubusercontent.com/bioimage-io/spec-bioimage-io/main/example_specs/models/unet2d_nuclei_broad/README.md", + "README.md", + ], + ), + ] + """∈📦 URL or relative path to a markdown file with additional documentation. + The recommended documentation file name is `README.md`. An `.md` suffix is mandatory. + The documentation should include a '#[#] Validation' (sub)section + with details on how to quantitatively validate the model on unseen data.""" + + @field_validator("documentation", mode="after") + @classmethod + def _validate_documentation(cls, value: DocumentationSource) -> DocumentationSource: + if not validation_context_var.get().perform_io_checks: + return value + + doc_path = download(value).path + doc_content = doc_path.read_text() + if not re.search("#.*[vV]alidation", doc_content): + issue_warning( + "No '# Validation' (sub)section found in {value}.", value=value + ) + + return value + + inputs: NotEmpty[Sequence[InputTensorDescr]] + """Describes the input tensors expected by this model.""" + + @field_validator("inputs", mode="after") + @classmethod + def _validate_input_axes( + cls, inputs: Sequence[InputTensorDescr] + ) -> Sequence[InputTensorDescr]: + input_size_refs = cls._get_axes_with_independent_size(inputs) + + for i, ipt in enumerate(inputs): + valid_independent_refs: Dict[ + Tuple[TensorId, AxisId], + Tuple[TensorDescr, AnyAxis, Union[int, ParameterizedSize]], + ] = { + **{ + (ipt.id, a.id): (ipt, a, a.size) + for a in ipt.axes + if not isinstance(a, BatchAxis) + and isinstance(a.size, (int, ParameterizedSize)) + }, + **input_size_refs, + } + for a, ax in enumerate(ipt.axes): + cls._validate_axis( + "inputs", + i=i, + tensor_id=ipt.id, + a=a, + axis=ax, + valid_independent_refs=valid_independent_refs, + ) + return inputs + + @staticmethod + def _validate_axis( + field_name: str, + i: int, + tensor_id: TensorId, + a: int, + axis: AnyAxis, + valid_independent_refs: Dict[ + Tuple[TensorId, AxisId], + Tuple[TensorDescr, AnyAxis, Union[int, ParameterizedSize]], + ], + ): + if isinstance(axis, BatchAxis) or isinstance(axis.size, int): + return + + if isinstance(axis.size, ParameterizedSize): + if isinstance(axis, WithHalo) and (axis.size.min - 2 * axis.halo) < 1: + raise ValueError( + f"axis {axis.id} with minimum size {axis.size.min} is too small for" + + f" halo {axis.halo}." + ) + + elif isinstance(axis.size, SizeReference): + ref = (axis.size.tensor_id, axis.size.axis_id) + if ref not in valid_independent_refs: + raise ValueError( + "Invalid tensor axis reference at" + + f" {field_name}[{i}].axes[{a}].size: {axis.size}." 
+ ) + if ref == (tensor_id, axis.id): + raise ValueError( + "Self-referencing not allowed for" + + f" {field_name}[{i}].axes[{a}].size: {axis.size}" + ) + if axis.type == "channel": + if valid_independent_refs[ref][1].type != "channel": + raise ValueError( + "A channel axis' size may only reference another fixed size" + + " channel axis." + ) + if isinstance(axis.channel_names, str) and "{i}" in axis.channel_names: + ref_size = valid_independent_refs[ref][2] + assert isinstance(ref_size, int), ( + "channel axis ref (another channel axis) has to specify fixed" + + " size" + ) + generated_channel_names = [ + Identifier(axis.channel_names.format(i=i)) + for i in range(1, ref_size + 1) + ] + axis.channel_names = generated_channel_names + + if (ax_unit := getattr(axis, "unit", None)) != ( + ref_unit := getattr(valid_independent_refs[ref][1], "unit", None) + ): + raise ValueError( + "The units of an axis and its reference axis need to match, but" + + f" '{ax_unit}' != '{ref_unit}'." + ) + min_size = valid_independent_refs[ref][2] + if isinstance(min_size, ParameterizedSize): + min_size = min_size.min + + if isinstance(axis, WithHalo) and (min_size - 2 * axis.halo) < 1: + raise ValueError( + f"axis {axis.id} with minimum size {min_size} is too small for halo" + + f" {axis.halo}." + ) + + else: + assert_never(axis.size) + + @model_validator(mode="after") + def _validate_test_tensors(self) -> Self: + if not validation_context_var.get().perform_io_checks: + return self + + test_arrays = [ + load_array(descr.test_tensor.download().path) + for descr in chain(self.inputs, self.outputs) + ] + tensors = { + descr.id: (descr, array) + for descr, array in zip(chain(self.inputs, self.outputs), test_arrays) + } + validate_tensors(tensors, tensor_origin="test_tensor") + return self + + @model_validator(mode="after") + def _validate_tensor_references_in_proc_kwargs(self, info: ValidationInfo) -> Self: + ipt_refs = {t.id for t in self.inputs} + out_refs = {t.id for t in self.outputs} + for ipt in self.inputs: + for p in ipt.preprocessing: + ref = p.kwargs.get("reference_tensor") + if ref is None: + continue + if ref not in ipt_refs: + raise ValueError( + f"`reference_tensor` '{ref}' not found. Valid input tensor" + + f" references are: {ipt_refs}." + ) + + for out in self.outputs: + for p in out.postprocessing: + ref = p.kwargs.get("reference_tensor") + if ref is None: + continue + + if ref not in ipt_refs and ref not in out_refs: + raise ValueError( + f"`reference_tensor` '{ref}' not found. Valid tensor references" + + f" are: {ipt_refs | out_refs}." + ) + + return self + + # TODO: use validate funcs in validate_test_tensors + # def validate_inputs(self, input_tensors: Mapping[TensorId, NDArray[Any]]) -> Mapping[TensorId, NDArray[Any]]: + + name: Annotated[ + str, + MinLen(5), + warn(MaxLen(64), "Name longer than 64 characters.", INFO), + ] + """A human-readable name of this model. 
+ It should be no longer than 64 characters + and may only contain letters, numbers, underscores, minus signs, or spaces.""" + + outputs: NotEmpty[Sequence[OutputTensorDescr]] + """Describes the output tensors.""" + + @field_validator("outputs", mode="after") + @classmethod + def _validate_tensor_ids( + cls, outputs: Sequence[OutputTensorDescr], info: ValidationInfo + ) -> Sequence[OutputTensorDescr]: + tensor_ids = [ + t.id for t in info.data.get("inputs", []) + info.data.get("outputs", []) + ] + duplicate_tensor_ids: List[str] = [] + seen: Set[str] = set() + for t in tensor_ids: + if t in seen: + duplicate_tensor_ids.append(t) + + seen.add(t) + + if duplicate_tensor_ids: + raise ValueError(f"Duplicate tensor ids: {duplicate_tensor_ids}") + + return outputs + + @staticmethod + def _get_axes_with_parameterized_size( + io: Union[Sequence[InputTensorDescr], Sequence[OutputTensorDescr]], + ): + return { + f"{t.id}.{a.id}": (t, a, a.size) + for t in io + for a in t.axes + if not isinstance(a, BatchAxis) and isinstance(a.size, ParameterizedSize) + } + + @staticmethod + def _get_axes_with_independent_size( + io: Union[Sequence[InputTensorDescr], Sequence[OutputTensorDescr]], + ): + return { + (t.id, a.id): (t, a, a.size) + for t in io + for a in t.axes + if not isinstance(a, BatchAxis) + and isinstance(a.size, (int, ParameterizedSize)) + } + + @field_validator("outputs", mode="after") + @classmethod + def _validate_output_axes( + cls, outputs: List[OutputTensorDescr], info: ValidationInfo + ) -> List[OutputTensorDescr]: + input_size_refs = cls._get_axes_with_independent_size( + info.data.get("inputs", []) + ) + output_size_refs = cls._get_axes_with_independent_size(outputs) + + for i, out in enumerate(outputs): + valid_independent_refs: Dict[ + Tuple[TensorId, AxisId], + Tuple[TensorDescr, AnyAxis, Union[int, ParameterizedSize]], + ] = { + **{ + (out.id, a.id): (out, a, a.size) + for a in out.axes + if not isinstance(a, BatchAxis) + and isinstance(a.size, (int, ParameterizedSize)) + }, + **input_size_refs, + **output_size_refs, + } + for a, ax in enumerate(out.axes): + cls._validate_axis( + "outputs", + i, + out.id, + a, + ax, + valid_independent_refs=valid_independent_refs, + ) + + return outputs + + packaged_by: List[Author] = Field(default_factory=list) + """The persons that have packaged and uploaded this model. + Only required if those persons differ from the `authors`.""" + + parent: Optional[LinkedModel] = None + """The model from which this model is derived, e.g. by fine-tuning the weights.""" + + # todo: add parent self check once we have `id` + # @model_validator(mode="after") + # def validate_parent_is_not_self(self) -> Self: + # if self.parent is not None and self.parent == self.id: + # raise ValueError("The model may not reference itself as parent model") + + # return self + + run_mode: Annotated[ + Optional[RunMode], + warn(None, "Run mode '{value}' has limited support across consumer software."), + ] = None + """Custom run mode for this model: for more complex prediction procedures like test time + data augmentation that currently cannot be expressed in the specification. + No standard run modes are defined yet.""" + + timestamp: Datetime = Datetime(datetime.now()) + """Timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format + with a few restrictions listed [here](https://docs.python.org/3/library/datetime.html#datetime.datetime.fromisoformat). 
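+ (In Python a datetime object is valid, too).""" + + # e.g. timestamp: 2024-03-01T12:30:00 (an arbitrary, illustrative value) +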
+ training_data: Union[LinkedDataset, DatasetDescr, None] = None + """The dataset used to train this model""" + + weights: WeightsDescr + """The weights for this model. + Weights can be given for different formats, but should otherwise be equivalent. + The available weight formats determine which consumers can use this model.""" + + @model_validator(mode="after") + def _add_default_cover(self) -> Self: + if not validation_context_var.get().perform_io_checks or self.covers: + return self + + try: + generated_covers = generate_covers( + [(t, load_array(t.test_tensor.download().path)) for t in self.inputs], + [(t, load_array(t.test_tensor.download().path)) for t in self.outputs], + ) + except Exception as e: + issue_warning( + "Failed to generate cover image(s): {e}", + value=self.covers, + msg_context=dict(e=e), + ) + else: + self.covers.extend(generated_covers) + + return self + + def get_input_test_arrays(self) -> List[NDArray[Any]]: + data = [load_array(ipt.test_tensor.download().path) for ipt in self.inputs] + assert all(isinstance(d, np.ndarray) for d in data) + return data + + def get_output_test_arrays(self) -> List[NDArray[Any]]: + data = [load_array(out.test_tensor.download().path) for out in self.outputs] + assert all(isinstance(d, np.ndarray) for d in data) + return data + + def get_tensor_sizes( + self, n: ParameterizedSize.N, batch_size: int + ) -> Dict[TensorId, Dict[AxisId, int]]: + all_axes = { + t.id: {a.id: a for a in t.axes} for t in chain(self.inputs, self.outputs) + } + + ret: Dict[TensorId, Dict[AxisId, int]] = {} + for t_descr in chain(self.inputs, self.outputs): + ret[t_descr.id] = {} + for a in t_descr.axes: + if a.size is None: + assert isinstance(a, BatchAxis) + s = batch_size + elif isinstance(a.size, int): + s = a.size + elif isinstance(a.size, ParameterizedSize): + s = a.size.get_size(n) + elif isinstance(a.size, SizeReference): + assert not isinstance(a, BatchAxis) + ref_axis = all_axes[a.size.tensor_id][a.size.axis_id] + assert not isinstance(ref_axis, BatchAxis) + s = a.size.get_size(axis=a, ref_axis=ref_axis, n=n) + else: + assert_never(a.size) + + ret[t_descr.id][a.id] = s + + return ret + + @model_validator(mode="before") + @classmethod + def _convert(cls, data: Dict[str, Any]) -> Dict[str, Any]: + if ( + data.get("type") == "model" + and isinstance(fv := data.get("format_version"), str) + and (fv.startswith("0.3.") or fv.startswith("0.4.")) + ): + m04 = _ModelDescr_v0_4.load(data) + if not isinstance(m04, InvalidDescr): + return _model_conv.convert_as_dict(m04) + + return data + + +class _ModelConv(Converter[_ModelDescr_v0_4, ModelDescr]): + def _convert( + self, src: _ModelDescr_v0_4, tgt: "type[ModelDescr] | type[dict[str, Any]]" + ) -> "ModelDescr | dict[str, Any]": + def conv_authors(auths: Optional[Sequence[_Author_v0_4]]): + conv = ( + _author_conv.convert if TYPE_CHECKING else _author_conv.convert_as_dict + ) + return None if auths is None else [conv(a) for a in auths] + + if TYPE_CHECKING: + arch_file_conv = _arch_file_conv.convert + arch_lib_conv = _arch_lib_conv.convert + else: + arch_file_conv = _arch_file_conv.convert_as_dict + arch_lib_conv = _arch_lib_conv.convert_as_dict + + input_size_refs = { + ipt.name: { + a: s + for a, s in zip( + ipt.axes, + ( + ipt.shape.min + if isinstance(ipt.shape, _ParameterizedInputShape_v0_4) + else ipt.shape + ), + ) + } + for ipt in src.inputs + if ipt.shape + } + output_size_refs = { + **{ + out.name: {a: s for a, s in zip(out.axes, out.shape)} + for out in 
src.outputs + if not isinstance(out.shape, _ImplicitOutputShape_v0_4) + }, + **input_size_refs, + } + + return tgt( + attachments=( + [] + if src.attachments is None + else [FileDescr(source=f) for f in src.attachments.files] + ), + authors=[ + _author_conv.convert_as_dict(a) for a in src.authors + ], # pyright: ignore[reportArgumentType] + cite=[ + {"text": c.text, "doi": c.doi, "url": c.url} for c in src.cite + ], # pyright: ignore[reportArgumentType] + config=src.config, + covers=src.covers, + description=src.description, + documentation=src.documentation, # pyright: ignore[reportArgumentType] + format_version="0.5.0", + git_repo=src.git_repo, # pyright: ignore[reportArgumentType] + icon=src.icon, + id=src.id, + id_emoji=src.id_emoji, + license=src.license, # type: ignore + links=src.links, + maintainers=[ + _maintainer_conv.convert_as_dict(m) for m in src.maintainers + ], # pyright: ignore[reportArgumentType] + name=src.name, + tags=src.tags, + type=src.type, + uploader=src.uploader, + version=src.version, + inputs=[ # pyright: ignore[reportArgumentType] + _input_tensor_conv.convert_as_dict(ipt, tt, st, input_size_refs) + for ipt, tt, st, in zip( + src.inputs, + src.test_inputs, + src.sample_inputs or [None] * len(src.test_inputs), + ) + ], + outputs=[ # pyright: ignore[reportArgumentType] + _output_tensor_conv.convert_as_dict(out, tt, st, output_size_refs) + for out, tt, st, in zip( + src.outputs, + src.test_outputs, + src.sample_outputs or [None] * len(src.test_outputs), + ) + ], + weights=(WeightsDescr if TYPE_CHECKING else dict)( + keras_hdf5=(w := src.weights.keras_hdf5) + and (KerasHdf5WeightsDescr if TYPE_CHECKING else dict)( + authors=conv_authors(w.authors), + source=w.source, + tensorflow_version=w.tensorflow_version or Version("1.15"), + parent=w.parent, + ), + onnx=(w := src.weights.onnx) + and (OnnxWeightsDescr if TYPE_CHECKING else dict)( + source=w.source, + authors=conv_authors(w.authors), + parent=w.parent, + opset_version=w.opset_version or 15, + ), + pytorch_state_dict=(w := src.weights.pytorch_state_dict) + and (PytorchStateDictWeightsDescr if TYPE_CHECKING else dict)( + source=w.source, + authors=conv_authors(w.authors), + parent=w.parent, + architecture=( + arch_file_conv( + w.architecture, + w.architecture_sha256, + w.kwargs, + ) + if isinstance(w.architecture, _CallableFromFile_v0_4) + else arch_lib_conv(w.architecture, w.kwargs) + ), + pytorch_version=w.pytorch_version or Version("1.10"), + dependencies=( + None + if w.dependencies is None + else (EnvironmentFileDescr if TYPE_CHECKING else dict)( + source=cast( + ImportantFileSource, + str(deps := w.dependencies)[ + ( + len("conda:") + if str(deps).startswith("conda:") + else 0 + ) : + ], + ) + ) + ), + ), + tensorflow_js=(w := src.weights.tensorflow_js) + and (TensorflowJsWeightsDescr if TYPE_CHECKING else dict)( + source=w.source, + authors=conv_authors(w.authors), + parent=w.parent, + tensorflow_version=w.tensorflow_version or Version("1.15"), + ), + tensorflow_saved_model_bundle=( + w := src.weights.tensorflow_saved_model_bundle + ) + and (TensorflowSavedModelBundleWeightsDescr if TYPE_CHECKING else dict)( + authors=conv_authors(w.authors), + parent=w.parent, + source=w.source, + tensorflow_version=w.tensorflow_version or Version("1.15"), + dependencies=( + None + if w.dependencies is None + else (EnvironmentFileDescr if TYPE_CHECKING else dict)( + source=cast( + ImportantFileSource, + ( + str(w.dependencies)[len("conda:") :] + if str(w.dependencies).startswith("conda:") + else str(w.dependencies) + ), + ) 
+ ) + ), + torchscript=(w := src.weights.torchscript) + and (TorchscriptWeightsDescr if TYPE_CHECKING else dict)( + source=w.source, + authors=conv_authors(w.authors), + parent=w.parent, + pytorch_version=w.pytorch_version or Version("1.10"), + ), + ), + ) + + +_model_conv = _ModelConv(_ModelDescr_v0_4, ModelDescr) + + +# todo: create better cover images for 3d data and non-image outputs +def generate_covers( + inputs: Sequence[Tuple[InputTensorDescr, NDArray[Any]]], + outputs: Sequence[Tuple[OutputTensorDescr, NDArray[Any]]], +) -> List[Path]: + def squeeze( + data: NDArray[Any], axes: Sequence[AnyAxis] + ) -> Tuple[NDArray[Any], List[AnyAxis]]: + """apply numpy.ndarray.squeeze while keeping track of the remaining axis descriptions""" + if data.ndim != len(axes): + raise ValueError( + f"tensor shape {data.shape} does not match described axes" + + f" {[a.id for a in axes]}" + ) + + axes = [deepcopy(a) for a, s in zip(axes, data.shape) if s != 1] + return data.squeeze(), axes + + def normalize( + data: NDArray[Any], axis: Optional[Tuple[int, ...]], eps: float = 1e-7 + ) -> NDArray[np.float32]: + data = data.astype("float32") + data -= data.min(axis=axis, keepdims=True) + data /= data.max(axis=axis, keepdims=True) + eps + return data + + def to_2d_image(data: NDArray[Any], axes: Sequence[AnyAxis]): + original_shape = data.shape + data, axes = squeeze(data, axes) + + # take slice from any batch or index axis if needed + # and convert the first channel axis and take a slice from any additional channel axes + slices: Tuple[slice, ...] = () + ndim = data.ndim + ndim_need = 3 if any(isinstance(a, ChannelAxis) for a in axes) else 2 + has_c_axis = False + for i, a in enumerate(axes): + s = data.shape[i] + assert s > 1 + if isinstance(a, (BatchAxis, IndexAxis)) and ndim > ndim_need: + data = data[slices + (slice(s // 2 - 1, s // 2),)] + ndim -= 1 + elif isinstance(a, ChannelAxis): + if has_c_axis: + # second channel axis + data = data[slices + (slice(0, 1),)] + ndim -= 1 + else: + has_c_axis = True + if s == 2: + # visualize two channels with cyan and magenta + data = np.concatenate( + [ + data[slices + (slice(1, 2),)], + data[slices + (slice(0, 1),)], + ( + data[slices + (slice(0, 1),)] + + data[slices + (slice(1, 2),)] + ) + / 2, # TODO: take maximum instead? 
+ ], + axis=i, + ) + elif data.shape[i] == 3: + pass # visualize 3 channels as RGB + else: + # visualize first 3 channels as RGB + data = data[slices + (slice(3),)] + + assert data.shape[i] == 3 + + slices += (slice(None),) # type: ignore + + data, axes = squeeze(data, axes) + assert len(axes) == ndim + # take slice from z axis if needed + slices = () + if ndim > ndim_need: + for i, a in enumerate(axes): + s = data.shape[i] + if a.id.root == "z": + data = data[slices + (slice(s // 2 - 1, s // 2),)] + data, axes = squeeze(data, axes) + ndim -= 1 + break + + slices += (slice(None),) + + # take slice from any space or time axis + slices = () + + for i, a in enumerate(axes): + if ndim <= ndim_need: + break + + s = data.shape[i] + assert s > 1 + if isinstance( + a, (SpaceInputAxis, SpaceOutputAxis, TimeInputAxis, TimeOutputAxis) + ): + data = data[slices + (slice(s // 2 - 1, s // 2),)] + ndim -= 1 + + slices += (slice(None),) # type: ignore + + del slices + data, axes = squeeze(data, axes) + assert len(axes) == ndim + + if (has_c_axis and ndim != 3) or (not has_c_axis and ndim != 2): + raise ValueError( + f"Failed to construct cover image from shape {original_shape}" + ) + + if not has_c_axis: + assert ndim == 2 + data = np.repeat(data[:, :, None], 3, axis=2) + axes.append(ChannelAxis(channel_names=list(map(Identifier, "RGB")))) + ndim += 1 + + assert ndim == 3 + + # transpose axis order such that longest axis comes first... + axis_order = list(np.argsort(list(data.shape))) + axis_order.reverse() + # ... and channel axis is last + c = [i for i in range(3) if isinstance(axes[i], ChannelAxis)][0] + axis_order.append(axis_order.pop(c)) + axes = [axes[ao] for ao in axis_order] + data = data.transpose(axis_order) + + # h, w = data.shape[:2] + # if h / w in (1.0 or 2.0): + # pass + # elif h / w < 2: + # TODO: enforce 2:1 or 1:1 aspect ratio for generated cover images + + norm_along = ( + tuple(i for i, a in enumerate(axes) if a.type in ("space", "time")) or None + ) + # normalize the data and map to 8 bit + data = normalize(data, norm_along) + data = (data * 255).astype("uint8") + + return data + + def create_diagonal_split_image(im0: NDArray[Any], im1: NDArray[Any]): + assert im0.dtype == im1.dtype == np.uint8 + assert im0.shape == im1.shape + assert im0.ndim == 3 + N, M, C = im0.shape + assert C == 3 + out = np.ones((N, M, C), dtype="uint8") + for c in range(C): + outc = np.tril(im0[..., c]) + mask = outc == 0 + outc[mask] = np.triu(im1[..., c])[mask] + out[..., c] = outc + + return out + + ipt_descr, ipt = inputs[0] + out_descr, out = outputs[0] + + ipt_img = to_2d_image(ipt, ipt_descr.axes) + out_img = to_2d_image(out, out_descr.axes) + + cover_folder = Path(mkdtemp()) + if ipt_img.shape == out_img.shape: + covers = [cover_folder / "cover.png"] + imageio.imwrite(covers[0], create_diagonal_split_image(ipt_img, out_img)) + else: + covers = [cover_folder / "input.png", cover_folder / "output.png"] + imageio.imwrite(covers[0], ipt_img) + imageio.imwrite(covers[1], out_img) + + return covers diff --git a/bioimageio/spec/notebook/__init__.py b/bioimageio/spec/notebook/__init__.py new file mode 100644 index 000000000..b865394fb --- /dev/null +++ b/bioimageio/spec/notebook/__init__.py @@ -0,0 +1,20 @@ +# autogen: start +""" +implementations of all released minor versions are available in submodules: +- notebook v0_2: `bioimageio.spec.notebook.v0_2.NotebookDescr` [user documentation](../../../user_docs/notebook_descr_v0-2.md) +- notebook v0_3: `bioimageio.spec.notebook.v0_3.NotebookDescr` [user 
documentation](../../../user_docs/notebook_descr_v0-3.md) + """ +from typing import Union + +from pydantic import Discriminator +from typing_extensions import Annotated + +from .v0_2 import NotebookDescr as NotebookDescr_v0_2 +from .v0_3 import NotebookDescr as NotebookDescr +from .v0_3 import NotebookDescr as NotebookDescr_v0_3 + +AnyNotebookDescr = Annotated[ + Union[NotebookDescr_v0_2, NotebookDescr_v0_3], Discriminator("format_version") +] +"""Union of any released notebook description""" +# autogen: stop diff --git a/bioimageio/spec/notebook/v0_2.py b/bioimageio/spec/notebook/v0_2.py new file mode 100644 index 000000000..0c69a12bf --- /dev/null +++ b/bioimageio/spec/notebook/v0_2.py @@ -0,0 +1,51 @@ +from typing import Literal, Optional, Union + +from typing_extensions import Annotated + +from .._internal.common_nodes import Node +from .._internal.io import WithSuffix +from .._internal.io_basics import AbsoluteFilePath as AbsoluteFilePath +from .._internal.types import NotebookId as NotebookId +from .._internal.url import HttpUrl +from ..generic.v0_2 import AttachmentsDescr as AttachmentsDescr +from ..generic.v0_2 import Author as Author +from ..generic.v0_2 import BadgeDescr as BadgeDescr +from ..generic.v0_2 import CiteEntry as CiteEntry +from ..generic.v0_2 import Doi as Doi +from ..generic.v0_2 import GenericDescrBase +from ..generic.v0_2 import LinkedResource as LinkedResource +from ..generic.v0_2 import Maintainer as Maintainer +from ..generic.v0_2 import OrcidId as OrcidId +from ..generic.v0_2 import RelativeFilePath as RelativeFilePath +from ..generic.v0_2 import ResourceId as ResourceId +from ..generic.v0_2 import Uploader as Uploader +from ..generic.v0_2 import Version as Version + +_WithNotebookSuffix = WithSuffix(".ipynb", case_sensitive=True) +NotebookSource = Union[ + Annotated[HttpUrl, _WithNotebookSuffix], + Annotated[AbsoluteFilePath, _WithNotebookSuffix], + Annotated[RelativeFilePath, _WithNotebookSuffix], +] + + +class NotebookDescr(GenericDescrBase, title="bioimage.io notebook specification"): + """Bioimage.io description of a Jupyter Notebook.""" + + type: Literal["notebook"] = "notebook" + + id: Optional[NotebookId] = None + """Model zoo (bioimage.io) wide, unique identifier (assigned by bioimage.io)""" + + source: NotebookSource + """The Jupyter notebook""" + + +class LinkedNotebook(Node): + """Reference to a bioimage.io notebook.""" + + id: NotebookId + """A valid notebook `id` from the bioimage.io collection.""" + + version_number: Optional[int] = None + """version number (n-th published version, not the semantic version) of linked notebook""" diff --git a/bioimageio/spec/notebook/v0_3.py b/bioimageio/spec/notebook/v0_3.py new file mode 100644 index 000000000..7ff503413 --- /dev/null +++ b/bioimageio/spec/notebook/v0_3.py @@ -0,0 +1,46 @@ +from typing import Literal, Optional + +from .._internal.common_nodes import Node +from .._internal.io import FileDescr as FileDescr +from .._internal.io import Sha256 as Sha256 +from .._internal.io_basics import AbsoluteFilePath as AbsoluteFilePath +from .._internal.types import NotebookId as NotebookId +from .._internal.url import HttpUrl as HttpUrl +from ..generic.v0_3 import Author as Author +from ..generic.v0_3 import BadgeDescr as BadgeDescr +from ..generic.v0_3 import CiteEntry as CiteEntry +from ..generic.v0_3 import Doi as Doi +from ..generic.v0_3 import GenericDescrBase +from ..generic.v0_3 import LinkedResource as LinkedResource +from ..generic.v0_3 import Maintainer as Maintainer +from ..generic.v0_3 import OrcidId 
as OrcidId +from ..generic.v0_3 import RelativeFilePath as RelativeFilePath +from ..generic.v0_3 import ResourceId as ResourceId +from ..generic.v0_3 import Uploader as Uploader +from ..generic.v0_3 import Version as Version +from .v0_2 import NotebookSource as NotebookSource + + +class NotebookDescr(GenericDescrBase, title="bioimage.io notebook specification"): + """Bioimage.io description of a Jupyter notebook.""" + + type: Literal["notebook"] = "notebook" + + id: Optional[NotebookId] = None + """Model zoo (bioimage.io) wide, unique identifier (assigned by bioimage.io)""" + + parent: Optional[NotebookId] = None + """The description from which this one is derived""" + + source: NotebookSource + """The Jupyter notebook""" + + +class LinkedNotebook(Node): + """Reference to a bioimage.io notebook.""" + + id: NotebookId + """A valid notebook `id` from the bioimage.io collection.""" + + version_number: int + """version number (n-th published version, not the semantic version) of linked notebook""" diff --git a/bioimageio/spec/partner/__init__.py b/bioimageio/spec/partner/__init__.py deleted file mode 100644 index 0bc42ad26..000000000 --- a/bioimageio/spec/partner/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -"""bioimageio.spec extensions for BioImage.IO community partners""" -# todo: consider moving to its own module, e.g. bioimageio.partner -# todo: if not moving to its own module, add the dependency to a 'partner' extra in setup.py diff --git a/bioimageio/spec/partner/utils.py b/bioimageio/spec/partner/utils.py deleted file mode 100644 index 38ba5cb34..000000000 --- a/bioimageio/spec/partner/utils.py +++ /dev/null @@ -1,48 +0,0 @@ -import warnings -from pathlib import Path -from typing import Any, Dict, Union - -from bioimageio.spec.shared import resolve_rdf_source -from .imjoy_plugin_parser import get_plugin_as_rdf # type: ignore -from ..shared.raw_nodes import URI - - -def enrich_partial_rdf_with_imjoy_plugin(partial_rdf: Dict[str, Any], root: Union[URI, Path]) -> Dict[str, Any]: - """ - a (partial) rdf may have 'rdf_resource' or 'source' which resolve to rdf data that may be overwritten. - Due to resolving imjoy plugins this is not done in bioimageio.spec.collection atm - """ - - enriched_rdf = {} - if "rdf_source" in partial_rdf: - rdf_source = partial_rdf["rdf_source"] - if isinstance(rdf_source, str) and rdf_source.split("?")[0].endswith(".imjoy.html"): - # rdf_source is an imjoy plugin - rdf_source = dict(get_plugin_as_rdf(rdf_source)) - - else: - # rdf_source is an actual rdf - if not isinstance(rdf_source, dict): - try: - rdf_source, rdf_source_name, rdf_source_root = resolve_rdf_source(rdf_source) - except Exception as e: - try: - rdf_source, rdf_source_name, rdf_source_root = resolve_rdf_source(root / rdf_source) - except Exception as ee: - rdf_source = {} - warnings.warn(f"Failed to resolve `rdf_source`: 1. {e}\n2. 
{ee}") - else: - rdf_source["root_path"] = rdf_source_root # enables remote source content to be resolved - else: - rdf_source["root_path"] = rdf_source_root # enables remote source content to be resolved - - assert isinstance(rdf_source, dict) - enriched_rdf.update(rdf_source) - - if "source" in partial_rdf: - if partial_rdf["source"].split("?")[0].endswith(".imjoy.html"): - rdf_from_source = get_plugin_as_rdf(partial_rdf["source"]) - enriched_rdf.update(rdf_from_source) - - enriched_rdf.update(partial_rdf) # initial partial rdf overwrites fields from rdf_source or source - return enriched_rdf diff --git a/bioimageio/spec/partner_utils/__init__.py b/bioimageio/spec/partner_utils/__init__.py new file mode 100644 index 000000000..ff2875071 --- /dev/null +++ b/bioimageio/spec/partner_utils/__init__.py @@ -0,0 +1,3 @@ +"""bioimageio.spec extensions for bioimage.io community partners""" + +# todo: if not moving to its own module, add the dependency to a 'partner' extra in setup.py diff --git a/bioimageio/spec/partner_utils/imjoy/__init__.py b/bioimageio/spec/partner_utils/imjoy/__init__.py new file mode 100644 index 000000000..6a394361e --- /dev/null +++ b/bioimageio/spec/partner_utils/imjoy/__init__.py @@ -0,0 +1,3 @@ +from ._plugin_parser import enrich_partial_rdf_with_imjoy_plugin, get_plugin_as_rdf + +__all__ = ["enrich_partial_rdf_with_imjoy_plugin", "get_plugin_as_rdf"] diff --git a/bioimageio/spec/partner/imjoy_plugin_parser.py b/bioimageio/spec/partner_utils/imjoy/_plugin_parser.py similarity index 57% rename from bioimageio/spec/partner/imjoy_plugin_parser.py rename to bioimageio/spec/partner_utils/imjoy/_plugin_parser.py index 85a0e816f..46bbd747a 100644 --- a/bioimageio/spec/partner/imjoy_plugin_parser.py +++ b/bioimageio/spec/partner_utils/imjoy/_plugin_parser.py @@ -3,11 +3,17 @@ import copy import json import uuid +import warnings +from pathlib import Path +from typing import Any, Callable, Dict, Tuple, Union +from urllib.parse import urljoin import requests from lxml import etree +from pydantic import DirectoryPath, FilePath, HttpUrl +from ruyaml import YAML -from bioimageio.spec.shared import yaml +yaml = YAML(typ="safe") tag_types = ["config", "script", "link", "window", "style", "docs", "attachment"] @@ -59,10 +65,14 @@ def parse_imjoy_plugin(source, overwrite_config=None): elif plugin_comp.config[0].attrs.lang == "json": config = json.loads(plugin_comp.config[0].content) else: - raise Exception("Unsupported config language: " + plugin_comp.config[0].attrs.lang) + raise Exception( + "Unsupported config language: " + plugin_comp.config[0].attrs.lang + ) overwrite_config = overwrite_config or {} - config["tag"] = overwrite_config.get("tag") or (config.get("tags") and config.get("tags")[0]) + config["tag"] = overwrite_config.get("tag") or ( + config.get("tags") and config.get("tags")[0] + ) config["hot_reloading"] = overwrite_config.get("hot_reloading") config["scripts"] = [] # try to match the script with current tag @@ -86,7 +96,9 @@ def parse_imjoy_plugin(source, overwrite_config=None): config["origin"] = overwrite_config.get("origin") config["namespace"] = overwrite_config.get("namespace") config["code"] = source - config["id"] = config.get("name").strip().replace(" ", "_") + "_" + str(uuid.uuid4()) + config["id"] = ( + config.get("name").strip().replace(" ", "_") + "_" + str(uuid.uuid4()) + ) config["runnable"] = config.get("runnable", True) config["requirements"] = config.get("requirements") or [] @@ -96,7 +108,12 @@ def parse_imjoy_plugin(source, overwrite_config=None): 
if config.get("tag"): config[field] = obj.get(config.get("tag")) if not obj.get(config.get("tag")): - print("WARNING: " + field + " do not contain a tag named: " + config.get("tag")) + print( + "WARNING: " + + field + + " do not contain a tag named: " + + config.get("tag") + ) else: raise Exception("You must use 'tags' with configurable fields.") config["lang"] = config.get("lang") or "javascript" @@ -131,11 +148,11 @@ def convert_config_to_rdf(plugin_config, source_url=None) -> dict: tags.append("bioengine") rdf["tags"] = tags - docs = plugin_config.get("docs") -# if isinstance(docs, dict): -# rdf["documentation"] = docs.get("content") -# elif isinstance(docs, str): -# rdf["documentation"] = docs + # docs = plugin_config.get("docs") + # if isinstance(docs, dict): + # rdf["documentation"] = docs.get("content") + # elif isinstance(docs, str): + # rdf["documentation"] = docs rdf["covers"] = plugin_config.get("cover") # make sure we have a list if not rdf["covers"]: @@ -160,10 +177,73 @@ def convert_config_to_rdf(plugin_config, source_url=None) -> dict: return rdf -def get_plugin_as_rdf(source_url) -> dict: +def get_plugin_as_rdf(source_url: str) -> Dict[Any, Any]: """Get imjoy plugin config in RDF format.""" req = requests.get(source_url) source = req.text plugin_config = parse_imjoy_plugin(source) rdf = convert_config_to_rdf(plugin_config, source_url) return rdf + + +def enrich_partial_rdf_with_imjoy_plugin( + partial_rdf: Dict[str, Any], + root: Union[HttpUrl, DirectoryPath], + resolve_rdf_source: Callable[ + [Union[HttpUrl, FilePath, str]], + Tuple[Dict[str, Any], str, Union[HttpUrl, DirectoryPath]], + ], +) -> Dict[str, Any]: + """ + a (partial) rdf may have 'rdf_source' or 'source' which resolve to rdf data that may be overwritten. + Due to resolving imjoy plugins this is not done in bioimageio.spec.collection atm + """ + + enriched_rdf: Dict[str, Any] = {} + if "rdf_source" in partial_rdf: + given_rdf_src = partial_rdf["rdf_source"] + if isinstance(given_rdf_src, str) and given_rdf_src.split("?")[0].endswith( + ".imjoy.html" + ): + # given_rdf_src is an imjoy plugin + rdf_source = dict(get_plugin_as_rdf(given_rdf_src)) + else: + # given_rdf_src is an actual rdf + if isinstance(given_rdf_src, dict): + rdf_source: Dict[str, Any] = given_rdf_src + else: + try: + rdf_source, _, rdf_source_root = resolve_rdf_source(given_rdf_src) + except Exception as e: + try: + rdf_source, _, rdf_source_root = resolve_rdf_source( + root / given_rdf_src + if isinstance(root, Path) + else urljoin(str(root), given_rdf_src) + ) + except Exception as ee: + rdf_source = {} + warnings.warn( + f"Failed to resolve `rdf_source`: 1. {e}\n2. 
{ee}"
+                        )
+                    else:
+                        rdf_source["root_path"] = (
+                            rdf_source_root  # enables remote source content to be resolved
+                        )
+                else:
+                    rdf_source["root_path"] = (
+                        rdf_source_root  # enables remote source content to be resolved
+                    )
+
+        assert isinstance(rdf_source, dict)
+        enriched_rdf.update(rdf_source)
+
+    if "source" in partial_rdf:
+        if partial_rdf["source"].split("?")[0].endswith(".imjoy.html"):
+            rdf_from_source = get_plugin_as_rdf(partial_rdf["source"])
+            enriched_rdf.update(rdf_from_source)
+
+    enriched_rdf.update(
+        partial_rdf
+    )  # initial partial rdf overwrites fields from rdf_source or source
+    return enriched_rdf
diff --git a/bioimageio/spec/pretty_validation_errors.py b/bioimageio/spec/pretty_validation_errors.py
new file mode 100644
index 000000000..3d7e969ba
--- /dev/null
+++ b/bioimageio/spec/pretty_validation_errors.py
@@ -0,0 +1,78 @@
+from pprint import pformat
+from types import TracebackType
+from typing import Any, List, Type, Union
+
+from pydantic import ValidationError
+
+from .summary import format_loc
+
+try:
+    from IPython.core.getipython import get_ipython
+    from IPython.core.interactiveshell import InteractiveShell
+
+    class PrettyValidationError(ValueError):
+        """Wrap a pydantic.ValidationError to customize formatting."""
+
+        def __init__(self, validation_error: ValidationError):
+            super().__init__()
+            self.error = validation_error
+
+        def __str__(self):
+            errors: List[str] = []
+            for e in self.error.errors(include_url=False):
+                ipt_lines = pformat(
+                    e["input"], sort_dicts=False, depth=1, compact=True, width=30
+                ).split("\n")
+                if len(ipt_lines) > 2:
+                    ipt_lines[1:-1] = ["..."]
+
+                ipt = " ".join([il.strip() for il in ipt_lines])
+
+                errors.append(
+                    f"\n{format_loc(e['loc'])}\n {e['msg']} [type={e['type']},"
+                    + f" input={ipt}]"
+                )
+
+            return (
+                f"{self.error.error_count()} validation errors for"
+                f" {self.error.title}:{''.join(errors)}"
+            )
+
+    def _custom_exception_handler(
+        self: InteractiveShell,
+        etype: Type[ValidationError],
+        evalue: ValidationError,
+        tb: TracebackType,
+        tb_offset: Any = None,
+    ):
+        assert issubclass(etype, ValidationError), type(etype)
+        assert isinstance(evalue, ValidationError), type(evalue)
+
+        stb: Union[List[Union[str, Any]], Any] = self.InteractiveTB.structured_traceback(  # type: ignore
+            etype, PrettyValidationError(evalue), tb, tb_offset=tb_offset
+        )
+        if isinstance(stb, list):
+            orig_stb = list(stb)
+            stb = []
+            for line in orig_stb:
+                if (
+                    isinstance(line, str)
+                    and "pydantic" in line
+                    and "__tracebackhide__" in line
+                ):
+                    # ignore pydantic internal frame in traceback
+                    continue
+                stb.append(line)
+
+        self._showtraceback(etype, PrettyValidationError(evalue), stb)  # type: ignore
+
+    def enable_pretty_validation_errors_in_ipynb():
+        """A modestly hacky way to display prettified validation error messages and traceback
+        in interactive Python notebooks"""
+        ipy = get_ipython()
+        if ipy is not None:
+            ipy.set_custom_exc((ValidationError,), _custom_exception_handler)
+
+except ImportError:
+
+    def enable_pretty_validation_errors_in_ipynb():
+        return
diff --git a/example_specs/models/upsample_test_model/docs.md b/bioimageio/spec/py.typed
similarity index 100%
rename from example_specs/models/upsample_test_model/docs.md
rename to bioimageio/spec/py.typed
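Reviewer note: the new `pretty_validation_errors` module above only changes behavior inside IPython. A minimal usage sketch, using nothing beyond what the diff defines:

```python
from bioimageio.spec.pretty_validation_errors import (
    enable_pretty_validation_errors_in_ipynb,
)

# In a notebook cell: registers _custom_exception_handler for ValidationError
# via IPython's set_custom_exc; outside IPython this call silently does nothing.
enable_pretty_validation_errors_in_ipynb()

# Any pydantic ValidationError raised in later cells is then rendered through
# PrettyValidationError (one "loc / msg [type=..., input=...]" entry per error)
# instead of pydantic's default, much longer traceback.
```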
diff --git a/bioimageio/spec/rdf/__init__.py b/bioimageio/spec/rdf/__init__.py
deleted file mode 100644
index d8a7cc745..000000000
--- a/bioimageio/spec/rdf/__init__.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from . import v0_2
-
-# autogen: start
-from . import converters, raw_nodes, schema, utils
-from .raw_nodes import FormatVersion
-
-try:
-    from typing import get_args
-except ImportError:
-    from typing_extensions import get_args  # type: ignore
-
-format_version = get_args(FormatVersion)[-1]
-
-# autogen: stop
diff --git a/bioimageio/spec/rdf/converters.py b/bioimageio/spec/rdf/converters.py
deleted file mode 100644
index b296f1351..000000000
--- a/bioimageio/spec/rdf/converters.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# Auto-generated by generate_passthrough_modules.py - do not modify
-
-from .v0_2.converters import *
diff --git a/bioimageio/spec/rdf/raw_nodes.py b/bioimageio/spec/rdf/raw_nodes.py
deleted file mode 100644
index bb25c3c1f..000000000
--- a/bioimageio/spec/rdf/raw_nodes.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# Auto-generated by generate_passthrough_modules.py - do not modify
-
-from .v0_2.raw_nodes import *
diff --git a/bioimageio/spec/rdf/schema.py b/bioimageio/spec/rdf/schema.py
deleted file mode 100644
index 9a6b0a4b9..000000000
--- a/bioimageio/spec/rdf/schema.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# Auto-generated by generate_passthrough_modules.py - do not modify
-
-from .v0_2.schema import *
diff --git a/bioimageio/spec/rdf/utils.py b/bioimageio/spec/rdf/utils.py
deleted file mode 100644
index 1086c2eb1..000000000
--- a/bioimageio/spec/rdf/utils.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# Auto-generated by generate_passthrough_modules.py - do not modify
-
-from .v0_2.utils import *
diff --git a/bioimageio/spec/rdf/v0_2/__init__.py b/bioimageio/spec/rdf/v0_2/__init__.py
deleted file mode 100644
index 431dc6a3e..000000000
--- a/bioimageio/spec/rdf/v0_2/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from . import converters, raw_nodes, schema, utils
-from .raw_nodes import FormatVersion
-
-try:
-    from typing import get_args
-except ImportError:
-    from typing_extensions import get_args  # type: ignore
-
-format_version = get_args(FormatVersion)[-1]
diff --git a/bioimageio/spec/rdf/v0_2/converters.py b/bioimageio/spec/rdf/v0_2/converters.py
deleted file mode 100644
index 93f1fce84..000000000
--- a/bioimageio/spec/rdf/v0_2/converters.py
+++ /dev/null
@@ -1,33 +0,0 @@
-import copy
-from typing import Any, Dict
-
-
-def remove_slash_from_names(data: Dict[str, Any]) -> None:
-    if "name" in data and isinstance(data["name"], str):
-        data["name"] = data["name"].replace("/", "").replace("\\", "")
-
-    # remove slashes in author/maintainer name
-    authors = data.get("authors")
-    maintainers = data.get("maintainers")
-    persons = (authors if isinstance(authors, list) else []) + (maintainers if isinstance(maintainers, list) else [])
-    for p in persons:
-        if isinstance(p, dict) and "name" in p:
-            p["name"] = p["name"].replace("/", "").replace("\\", "")
-
-
-def maybe_convert(data: Dict[str, Any]) -> Dict[str, Any]:
-    data = copy.deepcopy(data)
-
-    # we unofficially accept strings as author entries...
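Before the rest of `maybe_convert` (continued below), a quick illustration of what this deleted converter did, derived only from the code in this hunk; the input dict is hypothetical:

```python
# Hypothetical 0.2.0 RDF data passed through the deleted maybe_convert:
data = {
    "format_version": "0.2.0",
    "name": "my/resource",
    "authors": ["Jane Doe"],  # plain string, unofficially accepted
}
converted = maybe_convert(data)

assert converted["authors"] == [{"name": "Jane Doe"}]  # string coerced to dict
assert converted["format_version"] == "0.2.3"  # bumped 0.2.0 -> 0.2.2 -> 0.2.3
assert converted["name"] == "myresource"  # slashes stripped at the 0.2.2 -> 0.2.3 step
```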
- authors = data.get("authors") - if isinstance(authors, list): - data["authors"] = [{"name": a} if isinstance(a, str) else a for a in authors] - - if data.get("format_version") in ("0.2.0", "0.2.1"): - data["format_version"] = "0.2.2" - - if data.get("format_version") == "0.2.2": - remove_slash_from_names(data) - data["format_version"] = "0.2.3" - - return data diff --git a/bioimageio/spec/rdf/v0_2/raw_nodes.py b/bioimageio/spec/rdf/v0_2/raw_nodes.py deleted file mode 100644 index 1b6aec4e9..000000000 --- a/bioimageio/spec/rdf/v0_2/raw_nodes.py +++ /dev/null @@ -1,173 +0,0 @@ -""" raw nodes for the general RDF spec - -raw nodes are the deserialized equivalent to the content of any RDF. -serialization and deserialization are defined in schema: -RDF <--schema--> raw nodes -""" -import dataclasses -import pathlib - -import packaging.version -import warnings -from dataclasses import dataclass -from pathlib import Path -from typing import Any, Dict, List, Union - -from marshmallow import missing -from marshmallow.utils import _Missing - -from bioimageio.spec.shared.raw_nodes import RawNode, ResourceDescription, URI - -try: - from typing import Literal, get_args -except ImportError: - from typing_extensions import Literal, get_args # type: ignore - -FormatVersion = Literal[ - "0.2.0", "0.2.1", "0.2.2", "0.2.3" -] # newest format needs to be last (used to determine latest format version) - - -@dataclass(init=False) -class Attachments(RawNode): - _include_in_package = ("files",) - - files: Union[_Missing, List[Union[Path, URI]]] = missing - unknown: Dict[str, Any] = missing - - def __init__( - self, - files: Union[_Missing, List[Union[Path, URI]]] = missing, - unknown: Dict[str, Any] = missing, - **implicitly_unknown, - ): - self.files = files - self.unknown = unknown or {} - self.unknown.update(implicitly_unknown) - super().__init__() - - -@dataclass -class _Person(RawNode): - name: Union[_Missing, str] = missing - affiliation: Union[_Missing, str] = missing - email: Union[_Missing, str] = missing - github_user: Union[_Missing, str] = missing - orcid: Union[_Missing, str] = missing - - -@dataclass -class Author(_Person): - name: str = missing - - -@dataclass -class Maintainer(_Person): - github_user: str = missing - - -@dataclass -class CiteEntry(RawNode): - text: str = missing - doi: Union[_Missing, str] = missing - url: Union[_Missing, str] = missing - - -@dataclass -class Badge(RawNode): - label: str = missing - icon: Union[_Missing, str] = missing - url: Union[_Missing, URI, Path] = missing - - -@dataclass -class RDF_Base(ResourceDescription): - attachments: Union[_Missing, Attachments] = missing - authors: Union[_Missing, List[Author]] = missing - badges: Union[_Missing, List[Badge]] = missing - cite: Union[_Missing, List[CiteEntry]] = missing - config: Union[_Missing, dict] = missing - covers: Union[_Missing, List[Union[URI, Path]]] = missing - description: str = missing - documentation: Union[_Missing, Path, URI] = missing - download_url: Union[_Missing, Path, URI] = missing - format_version: str = missing - git_repo: Union[_Missing, str] = missing - id: Union[_Missing, str] = missing - icon: Union[_Missing, str] = missing - license: Union[_Missing, str] = missing - links: Union[_Missing, List[str]] = missing - maintainers: Union[_Missing, List[Maintainer]] = missing - rdf_source: Union[_Missing, URI] = missing - source: Union[_Missing, URI, Path] = missing - tags: Union[_Missing, List[str]] = missing - - # manual __init__ to allow for unknown kwargs - def __init__( - self, - *, - # 
ResourceDescription - format_version: str, - name: str, - type: str = missing, - version: Union[_Missing, packaging.version.Version] = missing, - root_path: pathlib.Path = pathlib.Path(), - # RDF - attachments: Union[_Missing, Dict[str, Any]] = missing, - authors: Union[_Missing, List[Author]] = missing, - badges: Union[_Missing, List[Badge]] = missing, - cite: Union[_Missing, List[CiteEntry]] = missing, - config: Union[_Missing, dict] = missing, - covers: Union[_Missing, List[Union[URI, Path]]] = missing, - description: str, - documentation: Union[_Missing, Path, URI] = missing, - download_url: Union[_Missing, Path, URI] = missing, - git_repo: Union[_Missing, str] = missing, - id: Union[_Missing, str] = missing, - icon: Union[_Missing, str] = missing, - license: Union[_Missing, str] = missing, - links: Union[_Missing, List[str]] = missing, - maintainers: Union[_Missing, List[Maintainer]] = missing, - rdf_source: Union[_Missing, URI] = missing, - source: Union[_Missing, URI, Path] = missing, - tags: Union[_Missing, List[str]] = missing, - **unknown_kwargs, - ): - self.attachments = attachments - self.authors = authors - self.badges = badges - self.cite = cite - self.config = config - self.covers = covers - self.description = description - self.documentation = documentation - self.download_url = download_url - self.git_repo = git_repo - self.id = id - self.icon = icon - self.license = license - self.links = links - self.maintainers = maintainers - self.rdf_source = rdf_source - self.source = source - self.tags = tags - super().__init__(format_version=format_version, name=name, type=type, version=version, root_path=root_path) - - if unknown_kwargs: - # make sure we didn't forget a defined field - field_names = set(f.name for f in dataclasses.fields(self)) - for uk in unknown_kwargs: - assert uk not in field_names, uk - - warnings.warn(f"discarding unknown RDF fields: {unknown_kwargs}") - - def __post_init__(self): - if self.type is missing: - self.type = self.__class__.__name__.lower() - - super().__post_init__() - - -@dataclass(init=False) -class RDF(RDF_Base): - format_version: FormatVersion = missing diff --git a/bioimageio/spec/rdf/v0_2/schema.py b/bioimageio/spec/rdf/v0_2/schema.py deleted file mode 100644 index 1f9ab1890..000000000 --- a/bioimageio/spec/rdf/v0_2/schema.py +++ /dev/null @@ -1,278 +0,0 @@ -from types import ModuleType -from typing import ClassVar - -from marshmallow import EXCLUDE, ValidationError, validates, validates_schema - -from bioimageio.spec.shared import ( - BIOIMAGEIO_SITE_CONFIG, - BIOIMAGEIO_SITE_CONFIG_ERROR, - LICENSES, - field_validators, - fields, -) -from bioimageio.spec.shared.common import get_args, get_patched_format_version -from bioimageio.spec.shared.schema import SharedBioImageIOSchema, WithUnknown -from bioimageio.spec.shared.utils import is_valid_orcid_id -from . import raw_nodes -from .raw_nodes import FormatVersion - - -class _BioImageIOSchema(SharedBioImageIOSchema): - raw_nodes: ClassVar[ModuleType] = raw_nodes - - -class Attachments(_BioImageIOSchema, WithUnknown): - files = fields.List( - fields.Union([fields.URI(), fields.Path()]), - bioimageio_description="File attachments; included when packaging the resource.", - ) - - -class _Person(_BioImageIOSchema): - name = fields.Name(bioimageio_description="Full name.") - affiliation = fields.String(bioimageio_description="Affiliation.") - email = fields.Email(bioimageio_description="E-Mail") - github_user = fields.String(bioimageio_description="GitHub user name.") # todo: add validation? 
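The `orcid` field just below validates its value with `is_valid_orcid_id` after removing the hyphens. That helper's body is not part of this diff; presumably it implements the standard ISO 7064 11,2 ORCID check digit referenced in the field description, sketched here (`check_orcid_digits` is a hypothetical name):

```python
def check_orcid_digits(orcid_digits: str) -> bool:
    """ISO 7064 11,2 check for a 16-character ORCID with hyphens removed.

    A sketch of what is_valid_orcid_id presumably does, not its actual body.
    """
    if len(orcid_digits) != 16:
        return False
    total = 0
    for c in orcid_digits[:-1]:
        if not c.isdigit():
            return False
        total = (total + int(c)) * 2
    check = (12 - total % 11) % 11  # a check value of 10 is written as "X"
    return orcid_digits[-1] == ("X" if check == 10 else str(check))


assert check_orcid_digits("0000000218250097")  # 0000-0002-1825-0097
```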
- orcid = fields.String( - validate=[ - field_validators.Length(19), - lambda oid: all(oid[idx] == "-" for idx in [4, 9, 14]), - lambda oid: is_valid_orcid_id(oid.replace("-", "")), - ], - bioimageio_description="[orcid](https://support.orcid.org/hc/en-us/sections/360001495313-What-is-ORCID) id " - "in hyphenated groups of 4 digits, e.g. '0000-0001-2345-6789' (and [valid](" - "https://support.orcid.org/hc/en-us/articles/360006897674-Structure-of-the-ORCID-Identifier" - ") as per ISO 7064 11,2.)", - ) - - -class Author(_Person): - name = fields.Name(required=True, bioimageio_description="Full name.") - - -class Maintainer(_Person): - github_user = fields.String(required=True, bioimageio_description="GitHub user name.") - - -class Badge(_BioImageIOSchema): - bioimageio_description = "Custom badge." - label = fields.String(required=True, bioimageio_description="e.g. 'Open in Colab'") - icon = fields.String(bioimageio_description="e.g. 'https://colab.research.google.com/assets/colab-badge.svg'") - url = fields.Union( - [fields.URL(), fields.Path()], - bioimageio_description="e.g. 'https://colab.research.google.com/github/HenriquesLab/ZeroCostDL4Mic/blob/master/Colab_notebooks/U-net_2D_ZeroCostDL4Mic.ipynb'", - ) - - -class CiteEntry(_BioImageIOSchema): - text = fields.String(required=True, bioimageio_description="free text description") - doi = fields.DOI( - bioimageio_maybe_required=True, - bioimageio_description="digital object identifier, see https://www.doi.org/ (alternatively specify `url`)", - ) - url = fields.String( - bioimageio_maybe_required=True, bioimageio_description="url to cite (alternatively specify `doi`)" - ) # todo: change to fields.URL - - @validates_schema - def doi_or_url(self, data, **kwargs): - if data.get("doi") is None and data.get("url") is None: - raise ValidationError("doi or url needs to be specified in a citation") - - -class RDF(_BioImageIOSchema): - class Meta: - unknown = EXCLUDE - - bioimageio_description = f"""# BioImage.IO Resource Description File Specification {get_args(FormatVersion)[-1]} -This specification defines the fields used in a general BioImage.IO-compliant resource description file (`RDF`). -An RDF is stored as a YAML file and describes resources such as models, datasets, applications and notebooks. -Note that models are described with an extended Model RDF specification. - -The RDF contains mandatory and optional fields. In the following description, optional fields are indicated by -_optional_. _optional*_ with an asterisk indicates the field is optional depending on the value in another field. -If no specialized RDF exists for the specified type (like model RDF for type='model') additional fields may be -specified. -""" - - attachments = fields.Nested(Attachments(), bioimageio_description="Additional unknown keys are allowed.") - - authors_bioimageio_description = ( - "A list of authors. The authors are the creators of the specifications and the primary points of contact." - ) - authors = fields.List(fields.Nested(Author()), bioimageio_description=authors_bioimageio_description) - - badges = fields.List(fields.Nested(Badge()), bioimageio_description="a list of badges") - - cite_bioimageio_description = """A list of citation entries. -Each entry contains a mandatory `text` field and either one or both of `doi` and `url`. -E.g. 
the citation for the model architecture and/or the training data used.""" - cite = fields.List(fields.Nested(CiteEntry()), bioimageio_description=cite_bioimageio_description) - - config_bioimageio_description = ( - "A custom configuration field that can contain any keys not present in the RDF spec. " - "This means you should not store, for example, github repo URL in `config` since we already have the " - "`git_repo` key defined in the spec.\n" - "Keys in `config` may be very specific to a tool or consumer software. To avoid conflicted definitions, " - "it is recommended to wrap configuration into a sub-field named with the specific domain or tool name, " - """for example: - -```yaml - config: - bioimage_io: # here is the domain name - my_custom_key: 3837283 - another_key: - nested: value - imagej: - macro_dir: /path/to/macro/file -``` -""" - "If possible, please use [`snake_case`](https://en.wikipedia.org/wiki/Snake_case) for keys in `config`." - ) - config = fields.YamlDict(bioimageio_descriptio=config_bioimageio_description) - - covers = fields.List( - fields.Union([fields.URL(), fields.Path()]), - bioimageio_description="A list of cover images provided by either a relative path to the model folder, or a " - "hyperlink starting with 'http[s]'. Please use an image smaller than 500KB and an aspect ratio width to height " - "of 2:1. The supported image formats are: 'jpg', 'png', 'gif'.", # todo: field_validators image format - ) - - description = fields.String(required=True, bioimageio_description="A string containing a brief description.") - - documentation = fields.Union( - [ - fields.URL(), - fields.Path( - validate=field_validators.Attribute( - "suffix", - field_validators.Equal( - ".md", error="{!r} is invalid; expected markdown file with '.md' extension." - ), - ) - ), - ], - bioimageio_description="URL or relative path to markdown file with additional documentation. " - "For markdown files the recommended documentation file name is `README.md`.", - ) - - download_url = fields.Union( - [fields.URL(), fields.Path()], bioimageio_description="optional url to download the resource from" - ) - - format_version = fields.String( - required=True, - bioimageio_description_order=0, - bioimageio_description=( - "Version of the BioImage.IO Resource Description File Specification used." - f"The current general format version described here is {get_args(FormatVersion)[-1]}. " - "Note: The general RDF format is not to be confused with specialized RDF format like the Model RDF format." - ), - ) - - @validates_schema - def format_version_matches_type(self, data, **kwargs): - format_version = data.get("format_version") - type_ = data.get("type") - try: - patched_format_version = get_patched_format_version(type_, format_version) - if format_version.split(".") > patched_format_version.split("."): - raise ValueError( - f"Unknown format_version {format_version} (latest patch: {patched_format_version}; latest format version: )" - ) - except Exception as e: - raise ValidationError(f"Invalid format_version {format_version} for RDF type {type_}. (error: {e})") - - git_repo_bioimageio_description = "A url to the git repository, e.g. to Github or Gitlab." - git_repo = fields.URL(bioimageio_description=git_repo_bioimageio_description) - - icon = fields.String( - bioimageio_description="an icon for the resource" - ) # todo: limit length? 
validate=field_validators.Length(max=1) - - id = fields.String(bioimageio_description="Unique id within a collection of resources.") - license_bioimageio_description = ( - "A [SPDX license identifier](https://spdx.org/licenses/)(e.g. `CC-BY-4.0`, `MIT`, " - "`BSD-2-Clause`). We don't support custom license beyond the SPDX license list, if you need that please send " - "an Github issue to discuss your intentions with the community." - ) - license = fields.String( # todo: make mandatory? - # validate=field_validators.OneOf(LICENSES), # enforce license id - bioimageio_description=license_bioimageio_description - ) - - @validates("license") - def warn_about_deprecated_spdx_license(self, value: str): - license_info = LICENSES.get(value) - if license_info is None: - self.warn("license", f"{value} is not a recognized SPDX license identifier. See https://spdx.org/licenses/") - else: - if license_info.get("isDeprecatedLicenseId", False): - self.warn("license", f"{value} ({license_info['name']}) is deprecated.") - - if not license_info.get("isFsfLibre", False): - self.warn("license", f"{value} ({license_info['name']}) is not FSF Free/libre.") - - links = fields.List(fields.String(), bioimageio_description="links to other bioimage.io resources") - - maintainers = fields.List(fields.Nested(Maintainer()), bioimageio_description="Maintainers of this resource.") - - name = fields.Name(required=True, bioimageio_description="name of the resource, a human-friendly name") - - @validates - def warn_about_long_name(self, value: str): - if isinstance(value, str): - if len(value) > 64: - self.warn( - "name", f"Length of name ({len(value)}) exceeds the recommended maximum length of 64 characters." - ) - else: - self.warn("name", f"Could not check length of name {value}.") - - rdf_source = fields.Union( - [fields.URL(), fields.DOI()], bioimageio_description="url or doi to the source of the resource definition" - ) - source = fields.Union( - [fields.URI(), fields.Path()], - bioimageio_description="url or local relative path to the source of the resource", - ) - - tags = fields.List(fields.String(), bioimageio_description="A list of tags.") - - @validates("tags") - def warn_about_tag_categories(self, value): - if BIOIMAGEIO_SITE_CONFIG is None: - error = BIOIMAGEIO_SITE_CONFIG_ERROR - else: - missing_categories = [] - try: - categories = { - c["type"]: c.get("tag_categories", {}) for c in BIOIMAGEIO_SITE_CONFIG["resource_categories"] - }.get(self.__class__.__name__.lower(), {}) - for cat, entries in categories.items(): - if not any(e in value for e in entries): - missing_categories.append({cat: entries}) - except Exception as e: - error = str(e) - else: - error = None - if missing_categories: - self.warn("tags", f"Missing tags corresponding to bioimage.io categories: {missing_categories}") - - if error is not None: - self.warn("tags", f"could not check tag categories ({error})") - - type = fields.String(required=True) - - # todo: restrict valid RDF types? - @validates("type") - def validate_type(self, value): - schema_type = self.__class__.__name__.lower() - if value != schema_type: - self.warn("type", f"Unrecognized type '{value}'. Validating as {schema_type}.") - - version = fields.Version( - bioimageio_description="The version number of the model. The version number format must be a string in " - "`MAJOR.MINOR.PATCH` format following the guidelines in Semantic Versioning 2.0.0 (see https://semver.org/), " - "e.g. the initial version number should be `0.1.0`." 
- ) diff --git a/bioimageio/spec/rdf/v0_2/utils.py b/bioimageio/spec/rdf/v0_2/utils.py deleted file mode 100644 index 6a621adf7..000000000 --- a/bioimageio/spec/rdf/v0_2/utils.py +++ /dev/null @@ -1,5 +0,0 @@ -from . import raw_nodes - - -def filter_resource_description(raw_rd: raw_nodes.RDF_Base) -> raw_nodes.RDF_Base: - return raw_rd diff --git a/bioimageio/spec/shared/__init__.py b/bioimageio/spec/shared/__init__.py deleted file mode 100644 index ef1fa9193..000000000 --- a/bioimageio/spec/shared/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ -from __future__ import annotations - -import json -from pathlib import Path - -from ._resolve_source import ( - BIOIMAGEIO_COLLECTION, - BIOIMAGEIO_COLLECTION_ERROR, - BIOIMAGEIO_SITE_CONFIG, - BIOIMAGEIO_SITE_CONFIG_ERROR, - DownloadCancelled, - RDF_NAMES, - _resolve_json_from_url, - get_resolved_source_path, - resolve_local_source, - resolve_rdf_source, - resolve_rdf_source_and_type, - resolve_source, - source_available, -) -from ._update_nested import update_nested -from .common import get_args, yaml # noqa - -_license_file = Path(__file__).parent.parent / "static" / "licenses.json" -_license_data = json.loads(_license_file.read_text(encoding="utf-8")) - -LICENSES = {x["licenseId"]: x for x in _license_data["licenses"]} -LICENSE_DATA_VERSION = _license_data["licenseListVersion"] diff --git a/bioimageio/spec/shared/_resolve_source.py b/bioimageio/spec/shared/_resolve_source.py deleted file mode 100644 index eeb7c42fd..000000000 --- a/bioimageio/spec/shared/_resolve_source.py +++ /dev/null @@ -1,546 +0,0 @@ -import json -import os -import pathlib -import re -import shutil -import typing -import warnings -import zipfile -from functools import singledispatch -from io import BytesIO, StringIO -from tempfile import TemporaryDirectory -from urllib.request import url2pathname, urlopen - -from marshmallow import ValidationError - -from . 
import fields, raw_nodes -from .common import ( - BIOIMAGEIO_CACHE_PATH, - BIOIMAGEIO_CACHE_WARNINGS_LIMIT, - BIOIMAGEIO_COLLECTION_URL, - BIOIMAGEIO_SITE_CONFIG_URL, - BIOIMAGEIO_USE_CACHE, - DOI_REGEX, - RDF_NAMES, - CacheWarning, - get_spec_type_from_type, - no_cache_tmp_list, - tqdm, - yaml, -) -from .raw_nodes import URI - - -class DownloadCancelled(Exception): - # raise this exception to stop _download_url - pass - - -def _is_path(s: typing.Any) -> bool: - if not isinstance(s, (str, os.PathLike)): - return False - - try: - return pathlib.Path(s).exists() - except OSError: - return False - - -class RDF_Source(typing.NamedTuple): - data: dict - name: str - root: typing.Union[pathlib.Path, raw_nodes.URI] - - -def resolve_rdf_source( - source: typing.Union[dict, os.PathLike, typing.IO, str, bytes, URI, raw_nodes.ResourceDescription] -) -> RDF_Source: - # reduce possible source types - if isinstance(source, (BytesIO, StringIO)): - source = source.getvalue() - elif isinstance(source, os.PathLike): - source = pathlib.Path(source) - elif isinstance(source, raw_nodes.URI): - source = str(source) - elif isinstance(source, raw_nodes.ResourceDescription): - from bioimageio.spec.io_ import serialize_raw_resource_description_to_dict - - source = serialize_raw_resource_description_to_dict(source) - - if not isinstance(source, (dict, pathlib.Path, str, bytes)): - raise TypeError(f"Unexpected source type {type(source)}") - - if isinstance(source, pathlib.Path): - source_name = str(source) - root: typing.Union[pathlib.Path, raw_nodes.URI] = source.parent - elif isinstance(source, dict): - source_name = f"{{name: {source.get('name', '')}, ...}}" - source = dict(source) - given_root = source.pop("root_path", pathlib.Path()) - if _is_path(given_root): - root = pathlib.Path(given_root) - elif isinstance(given_root, URI): - root = given_root - elif isinstance(given_root, str): - root = URI(uri_string=given_root) - else: - raise ValueError(f"Encountered invalid root {given_root}") - elif isinstance(source, (str, bytes)): - source_name = str(source[:120]) + "..." - # string might be path or yaml string; for yaml string (or bytes) set root to cwd - - if _is_path(source): - assert isinstance(source, (str, os.PathLike)) - root = pathlib.Path(source).parent - else: - root = pathlib.Path() - else: - raise TypeError(source) - - if isinstance(source, str): - # source might be bioimageio nickname, id, doi, url or file path -> resolve to pathlib.Path - - bioimageio_rdf_source: typing.Optional[str] = (BIOIMAGEIO_COLLECTION_ENTRIES or {}).get(source, (None, None))[1] - - if bioimageio_rdf_source is not None: - # source is bioimageio id or bioimageio nickname - source = bioimageio_rdf_source - elif re.fullmatch(DOI_REGEX, source): # turn doi into url - zenodo_prefix = "10.5281/zenodo." - zenodo_record_api = "https://zenodo.org/api/records" - zenodo_sandbox_prefix = "10.5072/zenodo." - zenodo_sandbox_record_api = "https://sandbox.zenodo.org/api/records" - is_zenodo_doi = False - if source.startswith(zenodo_prefix): - is_zenodo_doi = True - elif source.startswith(zenodo_sandbox_prefix): - # zenodo sandbox doi (which is not a valid doi) - zenodo_prefix = zenodo_sandbox_prefix - zenodo_record_api = zenodo_sandbox_record_api - is_zenodo_doi = True - - if is_zenodo_doi: - # source is a doi pointing to a zenodo record; - # we'll expect an rdf.yaml file in that record and use it as source... 
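The comment above is the crux of the deleted Zenodo shortcut: a Zenodo DOI is rewritten into a record-file API URL directly, without a round trip through doi.org. Made concrete with the constants from the surrounding code (the record id is made up):

```python
doi = "10.5281/zenodo.1234567"  # hypothetical record id
zenodo_prefix = "10.5281/zenodo."
zenodo_record_api = "https://zenodo.org/api/records"

record_id = doi[len(zenodo_prefix):].split("/")[-1]  # tolerates "<record_id>/<version_id>"
source = f"{zenodo_record_api}/{record_id}/files/rdf.yaml/content"
# -> "https://zenodo.org/api/records/1234567/files/rdf.yaml/content"
```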
- record_id = source[len(zenodo_prefix) :] - s_count = record_id.count("/") - if s_count: - # record_id/record_version_id - if s_count != 1: - warnings.warn( - f"Unexpected Zenodo record ids: {record_id}. " - f"Expected or /." - ) - - record_id = record_id.split("/")[-1] - - source = f"{zenodo_record_api}/{record_id}/files/rdf.yaml/content" - else: - # resolve doi - # todo: make sure the resolved url points to a rdf.yaml or a zipped package - response = urlopen(f"https://doi.org/{source}?type=URL") - source = response.url - assert isinstance(source, str) - if not (source.endswith(".yaml") or source.endswith(".zip")): - raise NotImplementedError( - f"Resolved doi {source_name} to {source}, but don't know where to find 'rdf.yaml' " - f"or a packaged resource zip file." - ) - - assert isinstance(source, str) - if source.startswith("http"): - source_url = raw_nodes.URI(uri_string=source) - source = _download_url(source_url) - root = source_url.parent - - if _is_path(source): - source = pathlib.Path(source) - - if isinstance(source, (pathlib.Path, str, bytes)): - # source is either: - # - a file path (to a yaml or a packaged zip) - # - a yaml string, - # - or yaml file or zip package content as bytes - - if yaml is None: - raise RuntimeError(f"Cannot read RDF from {source_name} without ruamel.yaml dependency!") - - if isinstance(source, bytes): - potential_package: typing.Union[pathlib.Path, typing.IO, str] = BytesIO(source) - potential_package.seek(0) # type: ignore - else: - potential_package = source - - if zipfile.is_zipfile(potential_package): - with zipfile.ZipFile(potential_package) as zf: - for rdf_name in RDF_NAMES: - if rdf_name in zf.namelist(): - break - else: - raise ValueError(f"Missing 'rdf.yaml' in package {source_name}") - - if isinstance(source, os.PathLike): - root = pathlib.Path(source) - else: - root = pathlib.Path() - - assert isinstance(source, (pathlib.Path, bytes)) - source = BytesIO(zf.read(rdf_name)) - - source = yaml.load(source) - - if not isinstance(source, dict): - raise TypeError( - f"Expected dict type for loaded source, but got: {type(source)}. " - f"If '{str(source)}' is a file path, does it exist?" - ) - - return RDF_Source(source, source_name, root) - - -def resolve_rdf_source_and_type( - source: typing.Union[os.PathLike, typing.IO, bytes, str, dict, raw_nodes.URI] -) -> typing.Tuple[dict, str, typing.Union[pathlib.Path, raw_nodes.URI], str]: - data, source_name, root = resolve_rdf_source(source) - - type_ = get_spec_type_from_type(data.get("type")) - return data, source_name, root, type_ - - -@singledispatch # todo: fix type annotations -def resolve_source(source, root_path: typing.Union[os.PathLike, URI] = pathlib.Path(), output=None, pbar=None): - """Resolve sources to local files - - Args: - source: e.g. a path or uri - root_path: path to model-rdf - used to resolve relative paths - output: file path to write contents to - if not given a file path is created - pbar: progress bar sharing a minimal tqdm interface, if none given, tqdm is used. - pbar is only used in the case of downloading resources. Specifying a custom pbar here - helps adding features like progress reporting (outside the cmd) and cancellation - (by raising DownloadCancelled). 
- """ - raise TypeError(type(source)) - - -@resolve_source.register -def _resolve_source_uri_node( - source: raw_nodes.URI, - root_path: typing.Union[os.PathLike, URI] = pathlib.Path(), - output: typing.Optional[os.PathLike] = None, - pbar=None, -) -> pathlib.Path: - path_or_remote_uri = resolve_local_source(source, root_path, output) - if isinstance(path_or_remote_uri, raw_nodes.URI): - local_path = _download_url(path_or_remote_uri, output, pbar=pbar) - elif isinstance(path_or_remote_uri, pathlib.Path): - local_path = path_or_remote_uri - else: - raise TypeError(path_or_remote_uri) - - return local_path - - -@resolve_source.register -def _resolve_source_str( - source: str, - root_path: typing.Union[os.PathLike, URI] = pathlib.Path(), - output: typing.Optional[os.PathLike] = None, - pbar=None, -) -> pathlib.Path: - return resolve_source(fields.Union([fields.URI(), fields.Path()]).deserialize(source), root_path, output, pbar) - - -@resolve_source.register -def _resolve_source_path( - source: pathlib.Path, - root_path: typing.Union[os.PathLike, URI] = pathlib.Path(), - output: typing.Optional[os.PathLike] = None, - pbar=None, -) -> pathlib.Path: - if not os.path.isabs(source): - if isinstance(root_path, os.PathLike): - root_path = pathlib.Path(root_path).resolve() - source = root_path / source - if isinstance(source, URI): - return resolve_source(source, output=output, pbar=pbar) - - if output is None: - return source - else: - try: - shutil.copyfile(source, output) - except shutil.SameFileError: # source and output are identical - pass - return pathlib.Path(output) - - -@resolve_source.register -def _resolve_source_resolved_importable_path( - source: raw_nodes.ResolvedImportableSourceFile, - root_path: typing.Union[os.PathLike, URI] = pathlib.Path(), - output: typing.Optional[os.PathLike] = None, - pbar=None, -) -> raw_nodes.ResolvedImportableSourceFile: - return raw_nodes.ResolvedImportableSourceFile( - callable_name=source.callable_name, source_file=resolve_source(source.source_file, root_path, output, pbar) - ) - - -@resolve_source.register -def _resolve_source_importable_path( - source: raw_nodes.ImportableSourceFile, - root_path: typing.Union[os.PathLike, URI] = pathlib.Path(), - output: typing.Optional[os.PathLike] = None, - pbar=None, -) -> raw_nodes.ResolvedImportableSourceFile: - return raw_nodes.ResolvedImportableSourceFile( - callable_name=source.callable_name, source_file=resolve_source(source.source_file, root_path, output, pbar) - ) - - -@resolve_source.register -def _resolve_source_list( - source: list, - root_path: typing.Union[os.PathLike, URI] = pathlib.Path(), - output: typing.Optional[typing.Sequence[typing.Optional[os.PathLike]]] = None, - pbar: typing.Optional[typing.Sequence] = None, -) -> typing.List[pathlib.Path]: - assert output is None or len(output) == len(source) - assert pbar is None or len(pbar) == len(source) - return [ - resolve_source(el, root_path, out, pb) - for el, out, pb in zip(source, output or [None] * len(source), pbar or [None] * len(source)) - ] - - -def get_resolved_source_path( - source: typing.Union[ - raw_nodes.URI, str, pathlib.Path, raw_nodes.ResolvedImportableSourceFile, raw_nodes.ImportableSourceFile - ], - root_path: typing.Union[os.PathLike, URI], - pbar=None, -) -> pathlib.Path: - resolved = resolve_source(source, root_path=root_path, pbar=pbar) - if isinstance(resolved, os.PathLike): - return pathlib.Path(resolved) - elif isinstance(resolved, raw_nodes.ResolvedImportableSourceFile): - return resolved.source_file - else: - raise 
NotImplementedError(type(resolved)) - - -def resolve_local_sources( - sources: typing.Sequence[typing.Union[str, os.PathLike, raw_nodes.URI]], - root_path: os.PathLike, - outputs: typing.Optional[typing.Sequence[typing.Optional[os.PathLike]]] = None, -) -> typing.List[typing.Union[pathlib.Path, raw_nodes.URI]]: - if outputs is None: - outputs = [None] * len(sources) - - assert outputs is not None - assert len(outputs) == len(sources) - return [resolve_local_source(src, root_path, out) for src, out in zip(sources, outputs)] - - -def resolve_local_source( - source: typing.Union[str, os.PathLike, raw_nodes.URI], - root_path: typing.Union[os.PathLike, URI], - output: typing.Optional[os.PathLike] = None, -) -> typing.Union[pathlib.Path, raw_nodes.URI]: - if isinstance(source, os.PathLike) or isinstance(source, str): - if isinstance(root_path, os.PathLike): - try: # source as relative path from root_path - source_from_root = pathlib.Path(root_path) / source - is_path_rp = source_from_root.exists() - except OSError: - pass - else: - if is_path_rp: - source = source_from_root - - source = pathlib.Path(source) - if not source.exists(): - raise FileNotFoundError(f"Could not find {source}") - - if output is None: - return source - else: - try: - shutil.copyfile(source, output) - except shutil.SameFileError: - pass - return pathlib.Path(output) - - if isinstance(source, str): - uri = fields.URI().deserialize(source) - else: - uri = source - - assert isinstance(uri, raw_nodes.URI), uri - if uri.scheme == "file": - local_path_or_remote_uri: typing.Union[pathlib.Path, raw_nodes.URI] = pathlib.Path(url2pathname(uri.path)) - elif uri.scheme in ("https", "https"): - local_path_or_remote_uri = uri - else: - raise ValueError(f"Unknown uri scheme {uri.scheme}") - - return local_path_or_remote_uri - - -def source_available(source: typing.Union[pathlib.Path, raw_nodes.URI], root_path: pathlib.Path) -> bool: - local_path_or_remote_uri = resolve_local_source(source, root_path) - if isinstance(local_path_or_remote_uri, raw_nodes.URI): - import requests # not available in pyodide - - response = requests.head(str(local_path_or_remote_uri)) - for n_redirect in range(100): - if ( - response.status_code in (301, 302, 307, 308) - and response.next is not None - and response.next.url is not None - ): - response = requests.head(response.next.url) - else: - break - - available = response.status_code == 200 - elif isinstance(local_path_or_remote_uri, pathlib.Path): - available = local_path_or_remote_uri.exists() - else: - raise TypeError(local_path_or_remote_uri) - - return available - - -cache_warnings_count = 0 - - -def _download_url(uri: raw_nodes.URI, output: typing.Optional[os.PathLike] = None, pbar=None) -> pathlib.Path: - global cache_warnings_count - - if output is not None: - local_path = pathlib.Path(output) - elif BIOIMAGEIO_USE_CACHE: - # todo: proper caching - if uri.authority == "zenodo.org" and uri.path.startswith("/api/records/") and uri.path.endswith("/content"): - p = uri.path[: -len("/content")].strip("/") - else: - p = uri.path.strip("/") - - local_path = BIOIMAGEIO_CACHE_PATH / uri.scheme / uri.authority / p / uri.query - else: - tmp_dir = TemporaryDirectory() - no_cache_tmp_list.append(tmp_dir) # keep temporary file until process ends - local_path = pathlib.Path(tmp_dir.name) / "file" - - if local_path.exists(): - cache_warnings_count += 1 - if cache_warnings_count <= BIOIMAGEIO_CACHE_WARNINGS_LIMIT: - warnings.warn(f"found cached {local_path}. 
Skipping download of {uri}.", category=CacheWarning) - if cache_warnings_count == BIOIMAGEIO_CACHE_WARNINGS_LIMIT: - warnings.warn( - f"Reached cache warnings limit. No more warnings about cache hits will be issued.", - category=CacheWarning, - ) - - else: - local_path.parent.mkdir(parents=True, exist_ok=True) - - import requests # not available in pyodide - - try: - # download with tqdm adapted from: - # https://github.com/shaypal5/tqdl/blob/189f7fd07f265d29af796bee28e0893e1396d237/tqdl/core.py - # Streaming, so we can iterate over the response. - headers = {} - if os.environ.get("CI", "false").lower() in ("1", "t", "true", "yes", "y"): - headers["User-Agent"] = "ci" - - user_agent = os.environ.get("BIOIMAGEIO_USER_AGENT") - if user_agent is not None: - headers["User-Agent"] = user_agent - - r = requests.get(str(uri), stream=True, headers=headers) - r.raise_for_status() - # Total size in bytes. - total_size = int(r.headers.get("content-length", 0)) - block_size = 1024 # 1 Kibibyte - if pbar: - t = pbar(total=total_size, unit="iB", unit_scale=True, desc=local_path.name) - else: - t = tqdm(total=total_size, unit="iB", unit_scale=True, desc=local_path.name) - tmp_path = local_path.with_suffix(f"{local_path.suffix}.part") - with tmp_path.open("wb") as f: - for data in r.iter_content(block_size): - t.update(len(data)) - f.write(data) - - t.close() - if total_size != 0 and hasattr(t, "n") and t.n != total_size: - # todo: check more carefully and raise on real issue - warnings.warn(f"Download ({t.n}) does not have expected size ({total_size}).") - - shutil.move(f.name, str(local_path)) - except DownloadCancelled as e: - # let calling code handle this exception specifically -> allow for cancellation of - # long running downloads per user request - raise e - except Exception as e: - raise RuntimeError(f"Failed to download {uri} ({e})") from e - - return local_path - - -T = typing.TypeVar("T") - - -def _resolve_json_from_url( - url: str, - expected_type: typing.Union[typing.Type[dict], typing.Type[T]] = dict, - warning_msg: typing.Optional[str] = "Failed to fetch {url}: {error}", - encoding: typing.Optional[str] = None, -) -> typing.Tuple[typing.Optional[T], typing.Optional[str]]: - try: - p = resolve_source(url) - with p.open(encoding=encoding) as f: - data = json.load(f) - - assert isinstance(data, expected_type) - except Exception as e: - data = None - error: typing.Optional[str] = str(e) - if warning_msg: - warnings.warn(warning_msg.format(url=url, error=error)) - else: - error = None - - return data, error - - -BIOIMAGEIO_SITE_CONFIG, BIOIMAGEIO_SITE_CONFIG_ERROR = _resolve_json_from_url( - BIOIMAGEIO_SITE_CONFIG_URL, encoding="utf-8", warning_msg=None -) -BIOIMAGEIO_COLLECTION, BIOIMAGEIO_COLLECTION_ERROR = _resolve_json_from_url( - BIOIMAGEIO_COLLECTION_URL, encoding="utf-8", warning_msg=None -) -if BIOIMAGEIO_COLLECTION is None: - BIOIMAGEIO_COLLECTION_ENTRIES: typing.Optional[typing.Dict[str, typing.Tuple[str, str]]] = None -else: - BIOIMAGEIO_COLLECTION_ENTRIES = {} - for cr in BIOIMAGEIO_COLLECTION.get("collection", []): - if "id" in cr and "rdf_source" in cr and "type" in cr: - entry = (cr["type"], cr["rdf_source"]) - BIOIMAGEIO_COLLECTION_ENTRIES[cr["id"]] = entry - - if "nickname" in cr: - BIOIMAGEIO_COLLECTION_ENTRIES[cr["nickname"]] = entry - - # add resource versions explicitly - for cv in cr.get("versions", []): - BIOIMAGEIO_COLLECTION_ENTRIES[f"{cr['id']}/{cv}"] = ( - cr["type"], - cr["rdf_source"].replace( - f"/{cr['versions'][0]}", f"/{cv}" - ), # todo: improve this 
replace-version-monkeypatch - ) diff --git a/bioimageio/spec/shared/_update_nested.py b/bioimageio/spec/shared/_update_nested.py deleted file mode 100644 index 7fef98824..000000000 --- a/bioimageio/spec/shared/_update_nested.py +++ /dev/null @@ -1,10 +0,0 @@ -from typing import TypeVar, Union - -from .node_transformer import NestedUpdateTransformer -from .raw_nodes import RawNode - -UpdateType = TypeVar("UpdateType") - - -def update_nested(data: Union[dict, list, RawNode], update: UpdateType) -> Union[dict, list, RawNode, UpdateType]: - return NestedUpdateTransformer().transform(data, update) diff --git a/bioimageio/spec/shared/common.py b/bioimageio/spec/shared/common.py deleted file mode 100644 index ddba5ac59..000000000 --- a/bioimageio/spec/shared/common.py +++ /dev/null @@ -1,216 +0,0 @@ -import getpass -import os -import pathlib -import tempfile -import warnings -from typing import Any, Dict, Generic, Iterable, List, Optional, Sequence, Union - -try: - from typing import Literal, get_args, get_origin, Protocol, TypedDict -except ImportError: - from typing_extensions import Literal, get_args, get_origin, Protocol, TypedDict # type: ignore - - -try: - from ruamel.yaml import YAML # not available in pyodide -except ImportError: - yaml: Optional["MyYAML"] = None -else: - - class MyYAML(YAML): - """add convenient improvements over YAML - improve dump: - - make sure to dump with utf-8 encoding. on windows encoding 'windows-1252' may otherwise be used - - expose indentation kwargs for dump - """ - - def dump(self, data, stream=None, *, transform=None): - if isinstance(stream, pathlib.Path): - with stream.open("wt", encoding="utf-8") as f: - return super().dump(data, f, transform=transform) - else: - return super().dump(data, stream, transform=transform) - - yaml = MyYAML(typ="safe") - - -try: - from tqdm import tqdm # not available in pyodide -except ImportError: - - class tqdm: # type: ignore - """no-op tqdm""" - - def __init__(self, iterable: Optional[Iterable] = None, *args, **kwargs): - self.iterable = iterable - - def __iter__(self): - if self.iterable is not None: - yield from self.iterable - - def update(self, *args, **kwargs): - pass - - def close(self): - pass - - -class CacheWarning(RuntimeWarning): - pass - - -BIOIMAGEIO_CACHE_PATH = pathlib.Path( - os.getenv("BIOIMAGEIO_CACHE_PATH", pathlib.Path(tempfile.gettempdir()) / getpass.getuser() / "bioimageio_cache") -) -BIOIMAGEIO_USE_CACHE = os.getenv("BIOIMAGEIO_USE_CACHE", "true").lower() in ("true", "yes", "1") -BIOIMAGEIO_CACHE_WARNINGS_LIMIT = int(os.getenv("BIOIMAGEIO_CACHE_WARNINGS_LIMIT", 3)) - -# keep a reference to temporary directories and files. 
-# These temporary locations are used instead of paths in BIOIMAGEIO_CACHE_PATH if BIOIMAGEIO_USE_CACHE is true, -# so that we guarantee that these temporary directories/file objects are not garbage collected too early -# and thus their content removed from disk, while we still have a pathlib.Path pointing there -no_cache_tmp_list: List[Any] = [] - -BIOIMAGEIO_SITE_CONFIG_URL = "https://raw.githubusercontent.com/bioimage-io/bioimage.io/main/site.config.json" -BIOIMAGEIO_COLLECTION_URL = "https://bioimage-io.github.io/collection-bioimage-io/collection.json" - - -DOI_REGEX = r"^10[.][0-9]{4,9}\/[-._;()\/:A-Za-z0-9]+$" -RDF_NAMES = ("rdf.yaml", "model.yaml") - - -class ValidationWarning(UserWarning): - """a warning category to warn with during RDF validation""" - - @staticmethod - def get_warning_summary(val_warns: Optional[Sequence[warnings.WarningMessage]]) -> dict: - """Summarize warning messages of the ValidationWarning category""" - - def add_val_warn_to_summary(s, keys, msg): - key = keys.pop(0) - if "[" in key: - key, rest = key.split("[") - assert rest[-1] == "]" - idx = int(rest[:-1]) - else: - idx = None - - if key not in s: - s[key] = {} if keys or idx is not None else msg - - s = s[key] - - if idx is not None: - if idx not in s: - s[idx] = {} if keys else {"warning": msg} - - s = s[idx] - - if keys: - assert isinstance(s, dict), (keys, s) - add_val_warn_to_summary(s, keys, msg) - - summary: dict = {} - nvw: set = set() - for vw in val_warns or []: - msg = str(vw.message) - if issubclass(vw.category, ValidationWarning): - if ": " in msg: - keys_, *rest = msg.split(": ") - msg = ": ".join(rest) - keys = keys_.split(":") - else: - keys = [] - - add_val_warn_to_summary(summary, keys, msg) - else: - nvw.add(msg) - - if nvw: - summary["non-validation-warnings"] = list(nvw) - - return summary - - -class ValidationSummary(TypedDict): - bioimageio_spec_version: str - error: Union[None, str, Dict[str, Any]] - name: str - nested_errors: Optional[ - Dict[str, dict] - ] # todo: mark as not required: typing_extensions.NotRequired (typing py 3.11) - source_name: str - status: Union[Literal["passed", "failed"], str] - traceback: Optional[List[str]] - warnings: dict - - -def get_format_version_module(type_: str, format_version: str): - assert "." in format_version - import bioimageio.spec - - type_ = get_spec_type_from_type(type_) - version_mod_name = "v" + "_".join(format_version.split(".")[:2]) - try: - return getattr(getattr(bioimageio.spec, type_), version_mod_name) - except AttributeError: - raise ValueError( - f"Invalid RDF format version {format_version} for RDF type {type_}. " - f"Submodule bioimageio.spec.{type_}.{version_mod_name} does not exist." 
- ) - - -def get_patched_format_version(type_: str, format_version: str): - """return latest patched format version for given type and major/minor of format_version""" - version_mod = get_format_version_module(type_, format_version) - return version_mod.format_version - - -def get_spec_type_from_type(type_: Optional[str]): - if type_ in ("model", "collection", "dataset"): - return type_ - else: - return "rdf" - - -def get_latest_format_version_module(type_: str): - type_ = get_spec_type_from_type(type_) - import bioimageio.spec - - try: - return getattr(bioimageio.spec, type_) - except AttributeError: - raise ValueError(f"Invalid RDF type {type_}") - - -def get_latest_format_version(type_: str): - return get_latest_format_version_module(type_).format_version - - -def get_class_name_from_type(type_: str): - type_ = get_spec_type_from_type(type_) - if type_ == "rdf": - return "RDF" - else: - return type_.title() - - -def get_args_flat(tp): - flat_args = [] - for a in get_args(tp): - orig = get_origin(a) - if orig is Literal or orig is Generic: - flat_args += list(get_args(a)) - else: - flat_args.append(a) - - return tuple(flat_args) - - -def nested_default_dict_as_nested_dict(nested_dd): - if isinstance(nested_dd, dict): - return {key: (nested_default_dict_as_nested_dict(value)) for key, value in nested_dd.items()} - elif isinstance(nested_dd, list): - return [nested_default_dict_as_nested_dict(value) for value in nested_dd] - else: - return nested_dd diff --git a/bioimageio/spec/shared/field_validators.py b/bioimageio/spec/shared/field_validators.py deleted file mode 100644 index f6012d786..000000000 --- a/bioimageio/spec/shared/field_validators.py +++ /dev/null @@ -1,102 +0,0 @@ -import typing - -from marshmallow.validate import ( - ContainsNoneOf, - Equal, - Length, - OneOf, - Predicate as MarshmallowPredicate, - Range, - URL as MarshmallowURL, - ValidationError, - Validator, -) - -ContainsNoneOf = ContainsNoneOf -Equal = Equal -Length = Length -OneOf = OneOf -Range = Range - - -class Attribute(Validator): - - default_message = "Invalid input." - - def __init__( - self, - attribute: str, - validate: typing.Union[ - typing.Callable[[typing.Any], typing.Any], typing.Iterable[typing.Callable[[typing.Any], typing.Any]] - ], - is_getter_method: bool = False, - ): - self.attribute = attribute - self.is_getter_method = is_getter_method - self.validate = [validate] if callable(validate) else validate - - def _repr_args(self) -> str: - return "attribute={!r}, is_getter_method={!r}, validate={!r}".format( - self.attribute, self.is_getter_method, self.validate - ) - - def __call__(self, value: typing.Any) -> typing.Any: - try: - attr = getattr(value, self.attribute) - if self.is_getter_method: - attr = attr() - except Exception as e: - raise ValidationError(str(e)) from e - - try: - return all(validator(attr) for validator in self.validate) - except Exception as e: - raise ValidationError(f"Invalid {self.attribute} ({value}): {str(e)}") from e - - -class Predicate(MarshmallowPredicate): - """extends marshmallow.Predicate by accepting *args and 'invert_output' . - Call the specified ``method`` of the ``value`` object. The - validator succeeds if the invoked method returns an object that - evaluates to True in a Boolean context. Any additional arguments - and keyword arguments will be passed to the method. - - :param method: The name of the method to invoke. - :param args: Additional arguments to pass to the method. 
- :param invert_output: Flag to succeed if method returns an object that evaluates to False instead of True. - :param error: Error message to raise in case of a validation error. - Can be interpolated with `{input}` and `{method}`. - :param kwargs: Additional keyword arguments to pass to the method. - """ - - default_message = "Invalid input." - - def __init__(self, method: str, *args, invert_output: bool = False, error: typing.Optional[str] = None, **kwargs): - super().__init__(method, error=error, **kwargs) - self.args = args - self.invert_output = invert_output - - def _format_error(self, value: typing.Any) -> str: - return self.error.format(input=value, method=self.method, args=self.args, invert_output=self.invert_output) - - def _repr_args(self) -> str: - return "method={!r}, invert_output={!r}, args={!r}, kwargs={!r}".format( - self.method, self.invert_output, self.args, self.kwargs - ) - - def __call__(self, value: typing.Any) -> typing.Any: - method = getattr(value, self.method) - - ret = method(*self.args, **self.kwargs) - if self.invert_output: - ret = not ret - - if not ret: - raise ValidationError(self._format_error(value)) - - return value - - -class URL(MarshmallowURL): - def __call__(self, value: typing.Any): - return super().__call__(str(value)) # cast value, which might be a raw_nodes.URI, to string diff --git a/bioimageio/spec/shared/fields.py b/bioimageio/spec/shared/fields.py deleted file mode 100644 index ba062de61..000000000 --- a/bioimageio/spec/shared/fields.py +++ /dev/null @@ -1,566 +0,0 @@ -"""fields to be used in the versioned schemas (may return shared raw nodes on `deserialize`)""" -import datetime -import logging -import pathlib -import typing - -import marshmallow_union -import numpy -import packaging.version -from marshmallow import Schema, ValidationError, missing -from marshmallow import fields as marshmallow_fields - -from . 
import field_validators, raw_nodes -from .utils._docs import resolve_bioimageio_descrcription - -logger = logging.getLogger(__name__) - - -class DocumentedField: - """base class for all fields that aids in generating documentation""" - - def __init__( - self, - *super_args, - short_bioimageio_description: typing.Union[str, typing.Callable[[], str]] = "", - bioimageio_description: typing.Union[str, typing.Callable[[], str]] = "", - bioimageio_description_order: typing.Optional[int] = None, - bioimageio_maybe_required: bool = False, # indicates that this field may be required, depending on other fields - bioimageio_examples_valid: typing.Optional[ - typing.Sequence[typing.Any] - ] = None, # valid examples to render in documentation - bioimageio_examples_invalid: typing.Optional[ - typing.Sequence[typing.Any] - ] = None, # invalid examples to render in documentation - **super_kwargs, - ): - bases = [b.__name__ for b in self.__class__.__bases__ if issubclass(b, marshmallow_fields.Field)] - if self.__class__.__name__ not in bases: - bases.insert(0, self.__class__.__name__) - - # todo: support examples for documentation - # if bioimageio_examples_valid is not None: - # valid_examples = - self.type_name = "→".join(bases) - self.short_bioimageio_description = short_bioimageio_description - self.bioimageio_description = bioimageio_description - self.bioimageio_description_order = bioimageio_description_order - self.bioimageio_maybe_required = bioimageio_maybe_required - super().__init__(*super_args, **super_kwargs) # type: ignore - - -class Array(DocumentedField, marshmallow_fields.Field): - def __init__(self, inner: marshmallow_fields.Field, **kwargs): - self.inner = inner - super().__init__(**kwargs) - - @property - def dtype(self) -> typing.Union[typing.Type[int], typing.Type[float], typing.Type[str]]: - if isinstance(self.inner, Integer): - return int - elif isinstance(self.inner, Float): - return float - elif isinstance(self.inner, String): - return str - else: - raise NotImplementedError(self.inner) - - def _deserialize_inner(self, value): - if isinstance(value, list): - return [self._deserialize_inner(v) for v in value] - else: - return self.inner.deserialize(value) - - def deserialize( - self, - value: typing.Any, - attr: typing.Optional[str] = None, - data: typing.Optional[typing.Mapping[str, typing.Any]] = None, - **kwargs, - ): - value = self._deserialize_inner(value) - - if isinstance(value, list): - try: - return numpy.array(value, dtype=self.dtype) - except ValueError as e: - raise ValidationError(str(e)) from e - else: - return value - - -class DateTime(DocumentedField, marshmallow_fields.DateTime): - """ - Parses an ISO8601 datetime string, or, if the value already has type datetime.datetime, - returns it unchanged - """ - - def _deserialize(self, value, attr, data, **kwargs): - if isinstance(value, datetime.datetime): - return value - - return super()._deserialize(value, attr, data, **kwargs) - - -class Dict(DocumentedField, marshmallow_fields.Dict): - def __init__( - self, - keys: typing.Optional[DocumentedField] = None, - values: typing.Optional[DocumentedField] = None, - *super_args, - **super_kwargs, - ): - assert keys is None or isinstance(keys, DocumentedField) - assert values is None or isinstance(values, DocumentedField) - super().__init__(keys, values, *super_args, **super_kwargs) - # add types of dict keys and values - key = "Any" if self.key_field is None else self.key_field.type_name - value = "Any" if self.value_field is None else self.value_field.type_name - self.type_name += 
f"\\[{key}, {value}\\]" - - -class YamlDict(Dict): - """YAML-friendly dict""" - - @staticmethod - def _make_yaml_friendly(obj): - if isinstance(obj, (list, tuple)): - return [YamlDict._make_yaml_friendly(ob) for ob in obj if ob is not missing] - elif isinstance(obj, dict): - return { - YamlDict._make_yaml_friendly(k): YamlDict._make_yaml_friendly(v) - for k, v in obj.items() - if v is not missing - } - elif obj is None or isinstance(obj, (float, int, str, bool)): - return obj - elif isinstance(obj, pathlib.PurePath): - return obj.as_posix() - elif isinstance(obj, raw_nodes.URI): - return str(obj) - elif isinstance(obj, (datetime.datetime, datetime.time)): - return obj.isoformat() - elif obj is missing: - return missing - else: - raise TypeError(f"Encountered YAML unfriendly type: {type(obj)}") - - def _serialize(self, value, attr, obj, **kwargs): - value = self._make_yaml_friendly(value) - return super()._serialize(value, attr, obj, **kwargs) - - -class RDF_Update(YamlDict): - def __init__( - self, - keys: typing.Optional[DocumentedField] = None, - values: typing.Optional[DocumentedField] = None, - *args, - **kwargs, - ): - if keys is None: - keys = String(bioimageio_description="RDF field names to overwrite") - - super().__init__(keys, values, *args, **kwargs) - - -class Email(DocumentedField, marshmallow_fields.Email): - pass - - -class Float(DocumentedField, marshmallow_fields.Float): - pass - - -class Integer(DocumentedField, marshmallow_fields.Integer): - pass - - -class List(DocumentedField, marshmallow_fields.List): - def __init__(self, instance: DocumentedField, *super_args, **super_kwargs): - assert isinstance(instance, DocumentedField), "classes not allowed, to avoid trouble" - super().__init__(instance, *super_args, **super_kwargs) - self.type_name += f"\\[{self.inner.type_name}\\]" # add type of list elements - - -class Number(DocumentedField, marshmallow_fields.Number): - pass - - -class Nested(DocumentedField, marshmallow_fields.Nested): - def __init__(self, nested: Schema, *super_args, many: bool = False, **super_kwargs): - assert isinstance(nested, Schema) # schema classes cause all sorts of trouble (so we enforce an instance) - assert not many, ( - "Use List(Nested(...)) or Nested(Schema(many=True)) instead! " - "see also https://github.com/marshmallow-code/marshmallow/issues/779 " - "We disallow this only to be more consistent and to avoid bugs."
- ) - super().__init__(nested, *super_args, **super_kwargs) - - self.type_name = self.schema.__class__.__name__ - if self.many: - self.type_name = f"List\\[{self.type_name}\\]" - - if not self.bioimageio_description: - self.bioimageio_description = self.schema.bioimageio_description - - if not self.short_bioimageio_description: - self.short_bioimageio_description = self.schema.short_bioimageio_description - - repeat_type_name = self.type_name if self.bioimageio_description else "" - add_to_descr = f" {repeat_type_name} is a Dict with the following keys:" - bioimageio_description_part = self.bioimageio_description - self.bioimageio_description = ( - lambda: resolve_bioimageio_descrcription(bioimageio_description_part) + add_to_descr - ) - - def _deserialize(self, value, attr, data, partial=None, **kwargs): - if not isinstance(value, dict): - raise ValidationError(f"Expected dictionary, but got {type(value).__name__}.") - - return super()._deserialize(value, attr, data, partial, **kwargs) - - -class Raw(DocumentedField, marshmallow_fields.Raw): - pass - - -class String(DocumentedField, marshmallow_fields.String): - pass - - -class Name(String): - def __init__( - self, - *, - validate: typing.Optional[ - typing.Union[ - typing.Callable[[typing.Any], typing.Any], typing.Iterable[typing.Callable[[typing.Any], typing.Any]] - ] - ] = None, - **kwargs, - ) -> None: - if validate is None: - validate = [] - - if isinstance(validate, typing.Iterable): - validate = list(validate) - else: - validate = [validate] - - validate.append( - field_validators.Predicate("__contains__", "/", invert_output=True, error="may not contain '/'") - ) - validate.append( - field_validators.Predicate("__contains__", "\\", invert_output=True, error="may not contain '\\'") - ) - super().__init__(validate=validate, **kwargs) - - -class DOI(String): - pass - - -class Tuple(DocumentedField, marshmallow_fields.Tuple): - def __init__(self, tuple_fields: typing.Sequence[DocumentedField], *args, **kwargs): - assert all(isinstance(tf, DocumentedField) for tf in tuple_fields) - super().__init__(tuple_fields, *args, **kwargs) - - def _serialize(self, value, attr, obj, **kwargs) -> typing.List: - value = super()._serialize(value, attr, obj, **kwargs) - return list(value) # return tuple as list - - def _jsonschema_type_mapping(self): - import marshmallow_jsonschema - - return { - "type": "array", - "items": [marshmallow_jsonschema.JSONSchema()._get_schema_for_field(self, tf) for tf in self.tuple_fields], - } - - -class Union(DocumentedField, marshmallow_union.Union): - _candidate_fields: typing.Iterable[typing.Union[DocumentedField, marshmallow_fields.Field]] - - def __init__(self, fields_, *super_args, **super_kwargs): - assert all(isinstance(f, DocumentedField) for f in fields_), "only DocumentedField instances (no classes)!" - super().__init__(fields_, *super_args, **super_kwargs) - self.type_name += f"\\[{' | '.join(cf.type_name for cf in self._candidate_fields)}\\]" # add types of options - - def _deserialize(self, value, attr=None, data=None, **kwargs): - try: - return super()._deserialize(value, attr=attr, data=data, **kwargs) - except ValidationError as e: - errors = sorted(e.messages, key=lambda msg: len(msg)) - messages = ["Errors in all options for this field. 
Fix any of the following errors:"] + errors - raise ValidationError(message=messages, field_name=attr) from e - - -class Axes(String): - def _deserialize(self, *args, **kwargs) -> str: - axes_str = super()._deserialize(*args, **kwargs) - valid_axes = self.metadata.get("valid_axes", "bitczyx") - if any(a not in valid_axes for a in axes_str): - raise ValidationError(f"Invalid axes! Valid axes consist of: {valid_axes}") - - return axes_str - - -class Dependencies(String): # todo: check format of dependency string - def _deserialize(self, *args, **kwargs) -> raw_nodes.Dependencies: - from . import schema - - dep_str = super()._deserialize(*args, **kwargs) - try: - manager, *file_parts = dep_str.split(":") - data = dict(manager=manager, file=":".join(file_parts)) - ret = schema.Dependencies().load(data) - except Exception as e: - raise ValidationError(f"Invalid dependency: {dep_str} ({e})") - - return ret - - -class ExplicitShape(List): - def __init__(self, **super_kwargs): - super().__init__(Integer(), **super_kwargs) - - -class ImportableSource(String): - @staticmethod - def _is_import(path): - return ":" not in path - - @staticmethod - def _is_filepath(path): - return ":" in path - - def _deserialize(self, *args, **kwargs) -> typing.Any: - source_str: str = super()._deserialize(*args, **kwargs) - if self._is_import(source_str): - last_dot_idx = source_str.rfind(".") - - module_name = source_str[:last_dot_idx] - object_name = source_str[last_dot_idx + 1 :] - - if not module_name: - raise ValidationError( - f"Missing module name in importable source: {source_str}. Is it just missing a dot?" - ) - - if not object_name: - raise ValidationError( - f"Missing object/callable name in importable source: {source_str}. Is it just missing a dot?" - ) - - return raw_nodes.ImportableModule(callable_name=object_name, module_name=module_name) - - elif self._is_filepath(source_str): - *module_uri_parts, object_name = source_str.split(":") - module_uri = ":".join(module_uri_parts).strip(":") - - source_file_field = Union( - [ - URL(), - Path(), - ] - ) - return raw_nodes.ImportableSourceFile( - callable_name=object_name, source_file=source_file_field.deserialize(module_uri) - ) - else: - raise ValidationError(source_str) - - def _serialize(self, value, attr, obj, **kwargs) -> typing.Optional[str]: - if value is None: - return None - elif isinstance(value, raw_nodes.ImportableModule): - return f"{value.module_name}.{value.callable_name}" - elif isinstance(value, raw_nodes.ImportableSourceFile): - return f"{value.source_file}:{value.callable_name}" - else: - raise TypeError(f"{value} has unexpected type {type(value)}") - - -class Kwargs(Dict): - def __init__( - self, - keys: String = String(), - values: typing.Optional[DocumentedField] = None, - bioimageio_description="Keyword arguments.", - **super_kwargs, - ): - super().__init__(keys, values, bioimageio_description=bioimageio_description, **super_kwargs) - - -class Path(String): - def _deserialize(self, *args, **kwargs): - path_str = super()._deserialize(*args, **kwargs) - return pathlib.Path(path_str) - - def _serialize(self, value, attr, obj, **kwargs) -> typing.Optional[str]: - if isinstance(value, pathlib.PurePath): - value = value.as_posix() - - return super()._serialize(value, attr, obj, **kwargs) - - -class RelativeLocalPath(Path): - def __init__( - self, - *super_args, - validate: typing.Optional[ - typing.Union[ - typing.Callable[[typing.Any], typing.Any], typing.Iterable[typing.Callable[[typing.Any], typing.Any]] - ] - ] = None, - **super_kwargs, - 
): - if validate is None: - validate = [] - elif callable(validate): - validate = [validate] - else: - validate = list(validate) - - super().__init__( - *super_args, - validate=validate - + [ - field_validators.Predicate("is_absolute", invert_output=True, error="expected relative path."), - field_validators.Attribute( - "as_posix", - [ - field_validators.ContainsNoneOf( - ":", error="expected local, relative file path." - ), # monkey patch to fail on urls - field_validators.Predicate( - "count", "..", invert_output=True, error="expected relative file path within model package." - ), - ], - is_getter_method=True, - ), - field_validators.Predicate( - "is_reserved", invert_output=True, error="invalid filename as it is reserved by the OS." - ), - ], - **super_kwargs, - ) - - def _serialize(self, value, attr, obj, **kwargs) -> typing.Optional[str]: - if value is not None and (not isinstance(value, pathlib.Path) or value.is_absolute()): - logger.warning(f"invalid local relative path: {value}") - - return super()._serialize(value, attr, obj, **kwargs) - - -class BioImageIO_ID(String): - def __init__( - self, - *super_args, - bioimageio_description: typing.Union[ - str, typing.Callable[[], str] - ] = "ID as shown on resource card on bioimage.io", - resource_type: typing.Optional[str] = None, - validate: typing.Optional[ - typing.Union[ - typing.Callable[[typing.Any], typing.Any], typing.Iterable[typing.Callable[[typing.Any], typing.Any]] - ] - ] = None, - **super_kwargs, - ): - from ._resolve_source import BIOIMAGEIO_COLLECTION_ENTRIES - - if validate is None: - validate = [] - - if isinstance(validate, typing.Iterable): - validate = list(validate) - else: - validate = [validate] - - if BIOIMAGEIO_COLLECTION_ENTRIES is not None: - error_msg = "'{input}' is not a valid BioImage.IO ID" - if resource_type is not None: - error_msg += f" of type {resource_type}" - - validate.append( - field_validators.OneOf( - { - k - for k, (v_type, _) in BIOIMAGEIO_COLLECTION_ENTRIES.items() - if resource_type is None or resource_type == v_type - }, - error=error_msg, - ) - ) - - super().__init__(*super_args, bioimageio_description=bioimageio_description, **super_kwargs) - - -class ProcMode(String): - all_modes = ("fixed", "per_dataset", "per_sample") - explanations = { - "fixed": "fixed values for mean and variance", - "per_dataset": "mean and variance are computed for the entire dataset", - "per_sample": "mean and variance are computed for each sample individually", - } - - def __init__( - self, - *, - validate: typing.Optional[ - typing.Union[ - typing.Callable[[typing.Any], typing.Any], typing.Iterable[typing.Callable[[typing.Any], typing.Any]] - ] - ] = None, - valid_modes: typing.Sequence[str] = all_modes, - required: bool = True, - bioimageio_description: str = "", - **kwargs, - ) -> None: - assert all(vm in self.all_modes for vm in valid_modes), valid_modes - self.valid_modes = valid_modes # used in doc generation script 'generate_processing_docs.py' - if validate is None: - validate = [] - - if isinstance(validate, typing.Iterable): - validate = list(validate) - else: - validate = [validate] - - validate.append(field_validators.OneOf(valid_modes)) - if not bioimageio_description: - bioimageio_description = f"One of {', '.join([f'{vm} ({self.explanations[vm]})' for vm in valid_modes])}" - super().__init__(validate=validate, required=required, bioimageio_description=bioimageio_description, **kwargs) - - -class SHA256(String): - def _deserialize(self, *args, **kwargs): - value_str = super()._deserialize(*args, 
**kwargs) - return value_str - - -class Version(String): - def _deserialize( - self, - value: typing.Any, - attr: typing.Optional[str], - data: typing.Optional[typing.Mapping[str, typing.Any]], - **kwargs, - ): - return packaging.version.Version(str(value)) - - -class URI(String): - def _deserialize(self, value, attr, data, **kwargs) -> typing.Any: - try: - return raw_nodes.URI(uri_string=value) - except Exception as e: - raise ValidationError(str(e)) from e - - -class URL(URI): - def __init__(self, *, validate: typing.Sequence[field_validators.Validator] = tuple(), **kwargs): - validate = list(validate) + [field_validators.URL(schemes=["http", "https"])] - super().__init__(validate=validate, **kwargs) diff --git a/bioimageio/spec/shared/node_transformer.py b/bioimageio/spec/shared/node_transformer.py deleted file mode 100644 index b328e94a6..000000000 --- a/bioimageio/spec/shared/node_transformer.py +++ /dev/null @@ -1,358 +0,0 @@ -import dataclasses -import os -import pathlib -import typing - -from marshmallow import missing -from marshmallow.utils import _Missing - -from . import raw_nodes -from ._resolve_source import resolve_source as _resolve_source -from .raw_nodes import URI - -try: - from typing import Literal -except ImportError: - from typing_extensions import Literal # type: ignore - -GenericResolvedNode = typing.TypeVar("GenericResolvedNode", bound=raw_nodes.RawNode) -GenericRawNode = typing.TypeVar("GenericRawNode", bound=raw_nodes.RawNode) -GenericRawRD = typing.TypeVar("GenericRawRD", bound=raw_nodes.ResourceDescription) - - -def iter_fields(node: GenericRawNode): - for field in dataclasses.fields(node): - yield field.name, getattr(node, field.name) - - -class NodeVisitor: - def visit(self, node: typing.Any) -> None: - method = "visit_" + node.__class__.__name__ - - visitor: typing.Callable[[typing.Any], typing.Any] = getattr(self, method, self.generic_visit) - - visitor(node) - - def generic_visit(self, node): - """Called if no explicit visitor function exists for a node.""" - - if isinstance(node, raw_nodes.RawNode): - for field, value in iter_fields(node): - self.visit(value) - elif isinstance(node, dict): - for subnode in node.values(): - self.visit(subnode) - elif isinstance(node, (tuple, list)): - for subnode in node: - self.visit(subnode) - - -class Transformer: - def transform(self, node: typing.Any, **kwargs) -> typing.Any: - method = "transform_" + node.__class__.__name__ - - transformer = getattr(self, method, self.generic_transformer) - - return transformer(node, **kwargs) # noqa - - def generic_transformer(self, node: typing.Any, **kwargs) -> typing.Any: - return node - - def transform_list(self, node: list, **kwargs) -> list: - return [self.transform(subnode, **kwargs) for subnode in node] - - def transform_dict(self, node: dict, **kwargs) -> dict: - return {key: self.transform(value, **kwargs) for key, value in node.items()} - - -class NestedUpdateTransformer: - """update a nested dict/list/raw_node with a nested dict/list update""" - - DROP = "DROP" - KEEP = "KEEP" - - def transform(self, node: typing.Any, update: typing.Any) -> typing.Any: - if update == self.KEEP: - return node - - if isinstance(update, raw_nodes.RawNode): - raise TypeError("updating with raw node is not allowed") - - method = "transform_" + node.__class__.__name__ - transformer = getattr(self, method, self.generic_transformer) - - return transformer(node, update) # noqa - - def generic_transformer(self, node: typing.Any, update: typing.Any) -> typing.Any: - if isinstance(node, 
raw_nodes.RawNode): - return self.transform_node(node, update) - else: - return update - - def transform_node( - self, node: raw_nodes.RawNode, update: typing.Union[dict, typing.Any] - ) -> typing.Union[raw_nodes.RawNode, typing.Any]: - if isinstance(update, dict): - updated_kwargs = { - name: self.transform(value, update.get(name, self.KEEP)) for name, value in iter_fields(node) - } - - if "format_version" in update: - # add new fields - for k in set(update) - set(updated_kwargs): - updated_kwargs[k] = update[k] - - # todo: resolve raw node for updated format_version - raise NotImplementedError("Updating format_version not yet implemented") - else: - invalid_updates = set(update) - set(updated_kwargs) - if invalid_updates: - raise ValueError(f"Got unexpected updates for non-existing fields: {invalid_updates}") - - return dataclasses.replace(node, **updated_kwargs) - else: - return update - - def transform_list(self, node: list, update: typing.Union[list, typing.Any]) -> typing.Union[list, typing.Any]: - if isinstance(update, list): - if len(update) < len(node): - update = update + [self.KEEP] * (len(node) - len(update)) - - if len(node) < len(update): - node = node + [self.DROP] * (len(update) - len(node)) - - node = [self.transform(n, u) for n, u in zip(node, update)] - return [e for e in node if e != self.DROP] - else: - return update - - def transform_dict(self, node: dict, update: typing.Union[dict, typing.Any]) -> typing.Union[dict, typing.Any]: - if isinstance(update, dict): - ret = {k: self.transform(v, update.get(k, self.KEEP)) for k, v in node.items()} - for k, v in update.items(): - if k not in ret: - ret[k] = v - - return {k: v for k, v in ret.items() if v != self.DROP} - else: - return update - - -class NodeTransformer(Transformer): - def generic_transformer(self, node: GenericRawNode, **kwargs) -> GenericRawNode: - if isinstance(node, raw_nodes.RawNode): - return dataclasses.replace( - node, **{name: self.transform(value, **kwargs) for name, value in iter_fields(node)} - ) - else: - return super().generic_transformer(node, **kwargs) - - -class NodeTransformerKnownParent(NodeTransformer): - def generic_transformer( - self, - node: GenericRawNode, - name: typing.Optional[str] = None, - parent: typing.Optional[raw_nodes.RawNode] = None, - **kwargs, - ) -> GenericRawNode: - if isinstance(node, raw_nodes.RawNode): - return dataclasses.replace( - node, **{n: self.transform(value, name=n, parent=node) for n, value in iter_fields(node)} - ) - else: - return super().generic_transformer(node, name=name, parent=parent) - - -class RawNodePackageTransformer(NodeTransformer): - """Transforms raw node fields specified by ._include_in_package to local relative paths. 
- Adds remote resources to given dictionary.""" - - def __init__( - self, - remote_resources: typing.Dict[str, typing.Union[pathlib.PurePath, URI]], - root: typing.Union[pathlib.Path, URI], - ): - super().__init__() - self.remote_resources = remote_resources - self.root = root - - def _transform_resource( - self, resource: typing.Union[typing.List[typing.Union[pathlib.PurePath, URI]], pathlib.PurePath, URI] - ) -> typing.Union[typing.List[pathlib.Path], _Missing, pathlib.Path]: - if isinstance(resource, list): - return [self._transform_resource(r) for r in resource] - elif resource is missing: - return missing - elif isinstance(resource, pathlib.PurePath): - name_from = resource - if resource.is_absolute(): - folder_in_package = "" - else: - if resource.parent.as_posix() == ".": - folder_in_package = "" - else: - folder_in_package = resource.parent.as_posix() + "/" - - resource = self.root / resource - - elif isinstance(resource, URI): - if ( - resource.authority == "zenodo.org" - and resource.path.startswith("/api/records/") - and resource.path.endswith("/content") - ): - name_from = pathlib.PurePath(resource.path[: -len("/content")].strip("/")) - else: - name_from = pathlib.PurePath(resource.path or "unknown") - folder_in_package = "" - else: - raise TypeError(f"Unexpected type {type(resource)} for {resource}") - - stem = name_from.stem - suffix = name_from.suffix - - conflict_free_name = f"{folder_in_package}{stem}{suffix}" - for i in range(100000): - existing_resource = self.remote_resources.get(conflict_free_name) - if existing_resource is not None and existing_resource != resource: - conflict_free_name = f"{folder_in_package}{stem}-{i}{suffix}" - else: - break - else: - raise ValueError(f"Attempting to pack too many resources with name {stem}{suffix}") - - self.remote_resources[conflict_free_name] = resource - - return pathlib.Path(conflict_free_name) - - def generic_transformer(self, node: GenericRawNode, **kwargs) -> GenericRawNode: - if isinstance(node, raw_nodes.RawNode): - resolved_data = { - field.name: self.transform(getattr(node, field.name), **kwargs) for field in dataclasses.fields(node) - } - for incl_field in node._include_in_package: - field_value = resolved_data[incl_field] - if field_value is not missing: # optional fields might be missing - resolved_data[incl_field] = self._transform_resource(field_value) - - return dataclasses.replace(node, **resolved_data) - else: - return super().generic_transformer(node, **kwargs) - - -class AbsoluteToRelativePathTransformer(NodeTransformer): - def __init__(self, *, root: typing.Union[os.PathLike, URI]): - if isinstance(root, URI): - self.root: typing.Union[pathlib.Path, URI] = root - else: - self.root = pathlib.Path(root).resolve() - - def transform_ImportableSourceFile( - self, node: raw_nodes.ImportableSourceFile, **kwargs - ) -> raw_nodes.ImportableSourceFile: - if isinstance(node.source_file, pathlib.Path) and node.source_file.is_absolute(): - if not isinstance(self.root, pathlib.Path): - raise TypeError(f"Cannot convert absolute path '{node.source_file}' with URI root '{self.root}'") - sf = node.source_file.relative_to(self.root) - return raw_nodes.ImportableSourceFile(source_file=sf, callable_name=node.callable_name) - else: - return node - - def _transform_Path(self, leaf: pathlib.Path): - if leaf.is_absolute(): - if not isinstance(self.root, pathlib.Path): - raise TypeError(f"Cannot convert absolute path '{leaf}' with URI root '{self.root}'") - # to make leaf relative to root, leaf needs to be resolved, because root is 
resolved - return leaf.resolve().relative_to(self.root) - else: - return leaf - - def transform_PosixPath(self, leaf: pathlib.PosixPath, **kwargs) -> pathlib.Path: - return self._transform_Path(leaf) - - def transform_WindowsPath(self, leaf: pathlib.WindowsPath, **kwargs) -> pathlib.Path: - return self._transform_Path(leaf) - - -class RelativePathTransformer(NodeTransformer): - def __init__(self, *, root: typing.Union[os.PathLike, URI]): - if isinstance(root, URI): - self.root: typing.Union[pathlib.Path, URI] = root - else: - self.root = pathlib.Path(root).resolve() - - def transform_URI(self, node: URI, **kwargs) -> typing.Union[URI, pathlib.Path]: - if node.scheme == "file": - assert not node.authority - assert not node.query - assert not node.fragment - return self._transform_Path(pathlib.Path(node.path)) - - return node - - def _transform_Path(self, leaf: pathlib.PurePath): - return self.root / leaf - - def transform_PurePath(self, leaf: pathlib.PurePath, **kwargs) -> typing.Union[URI, pathlib.Path]: - return self._transform_Path(leaf) - - def transform_PurePosixPath(self, leaf: pathlib.PurePosixPath, **kwargs) -> typing.Union[URI, pathlib.Path]: - return self._transform_Path(leaf) - - def transform_PureWindowsPath(self, leaf: pathlib.PureWindowsPath, **kwargs) -> typing.Union[URI, pathlib.Path]: - return self._transform_Path(leaf) - - def transform_PosixPath(self, leaf: pathlib.PosixPath, **kwargs) -> typing.Union[URI, pathlib.Path]: - return self._transform_Path(leaf) - - def transform_WindowsPath(self, leaf: pathlib.WindowsPath, **kwargs) -> typing.Union[URI, pathlib.Path]: - return self._transform_Path(leaf) - - def transform_ImportableSourceFile( - self, node: raw_nodes.ImportableSourceFile, **kwargs - ) -> raw_nodes.ImportableSourceFile: - if isinstance(node.source_file, URI): - return node - elif isinstance(node.source_file, pathlib.Path): - if node.source_file.is_absolute(): - return node - else: - return raw_nodes.ImportableSourceFile( - source_file=self.root / node.source_file, callable_name=node.callable_name - ) - else: - raise TypeError( - f"Unexpected type '{type(node.source_file)}' for raw_nodes.ImportableSourceFile.source_file '{node.source_file}'" - ) - - -class UriNodeTransformer(NodeTransformerKnownParent, RelativePathTransformer): - def __init__(self, *, root_path: os.PathLike, uri_only_if_in_package: bool = False): - super().__init__(root=root_path) - self.uri_only_if_in_package = uri_only_if_in_package - - def transform_URI( - self, - node: URI, - *, - name: typing.Optional[str] = None, - parent: typing.Optional[raw_nodes.RawNode] = None, - **kwargs, - ) -> typing.Union[URI, pathlib.Path]: - if self.uri_only_if_in_package and ((name is None or parent is None) or name not in parent._include_in_package): - return node - else: - local_path = _resolve_source(node, root_path=self.root) - return local_path - - def transform_ImportableSourceFile( - self, node: raw_nodes.ImportableSourceFile, **kwargs - ) -> raw_nodes.ResolvedImportableSourceFile: - return raw_nodes.ResolvedImportableSourceFile( - source_file=_resolve_source(node.source_file, self.root), callable_name=node.callable_name - ) - - def transform_ImportableModule(self, node: raw_nodes.ImportableModule, **kwargs) -> raw_nodes.LocalImportableModule: - r = self.root if isinstance(self.root, pathlib.Path) else pathlib.Path() - return raw_nodes.LocalImportableModule(**dataclasses.asdict(node), root_path=r) diff --git a/bioimageio/spec/shared/raw_nodes.py b/bioimageio/spec/shared/raw_nodes.py deleted file mode 
100644 index 1b0dc7664..000000000 --- a/bioimageio/spec/shared/raw_nodes.py +++ /dev/null @@ -1,216 +0,0 @@ -"""shared raw nodes that shared transformers act on - -raw nodes are the deserialized equivalent to the content of any RDF. -serialization and deserialization are defined in schema: -RDF <--schema--> raw nodes -""" -import dataclasses -import os -import pathlib -from dataclasses import dataclass -from typing import ClassVar, List, Optional, Sequence, Union -from urllib.parse import urlparse -from urllib.request import url2pathname - -import packaging.version -from marshmallow import missing -from marshmallow.utils import _Missing - -try: - from typing import get_args, get_origin -except ImportError: - from typing_extensions import get_args, get_origin # type: ignore - - -@dataclass -class RawNode: - _include_in_package: ClassVar[Sequence[str]] = tuple() # todo: move to field meta data - - def __post_init__(self): - for f in dataclasses.fields(self): - if getattr(self, f.name) is missing and ( - get_origin(f.type) is not Union or not isinstance(missing, get_args(f.type)) - ): - raise TypeError(f"{self.__class__}.__init__() missing required argument: '{f.name}'") - - field_names = [f.name for f in dataclasses.fields(self)] - for incl_in_package in self._include_in_package: - assert incl_in_package in field_names - - -@dataclass -class URI(RawNode): - """URI as scheme:[//authority]path[?query][#fragment]""" - - uri_string: Optional[str] = None # for convenience: init from string; this should be dataclasses.InitVar, - # but due to a bug in dataclasses.replace in py3.7 (https://bugs.python.org/issue36470) it is not. - scheme: str = missing - authority: str = "" - path: str = missing - query: str = "" - fragment: str = "" - - def __str__(self): - """scheme:[//authority]path[?query][#fragment]""" - return ( - self.scheme - + ":" - + ("//" + self.authority if self.authority else "") - + self.path - + ("?" + self.query if self.query else "") - + ("#" + self.fragment if self.fragment else "") - ) - - def __truediv__(self, other): - """Analogous to pathlib.Path's truediv, this concatenates the path element of a URI with a string or relative path. 
- Absolute paths or URIs are not concatenated, but returned instead of self, analogous to pathlib.Path() / other. - """ - if isinstance(other, (str, os.PathLike)): - other = pathlib.Path(other) - if other.is_absolute(): - return other - else: - other = pathlib.PurePosixPath(other) - if ( - self.authority == "zenodo.org" - and self.path.startswith("/api/records/") - and self.path.endswith("/content") - ): - new_path = (pathlib.PurePosixPath(self.path).parent / other / "content").as_posix() - else: - new_path = (pathlib.PurePosixPath(self.path) / other).as_posix() - return dataclasses.replace(self, path=new_path, uri_string=None) - elif isinstance(other, URI): - return other - else: - raise TypeError(f"Unexpected type {type(other)} of {other}.") - - @property - def parent(self): - path = pathlib.PurePosixPath(self.path) - if self.authority == "zenodo.org" and self.path.startswith("/api/records/") and self.path.endswith("/content"): - parent_path = (path.parent.parent / "content").as_posix() - else: - parent_path = path.parent.as_posix() - - return dataclasses.replace(self, path=parent_path, uri_string=None) - - def __post_init__(self): - uri_string = self.uri_string # should be InitVar, see comment at definition above - uri_components = [self.scheme, self.authority, self.path, self.query, self.fragment] - if uri_string is None: - pass - elif any(uri_components): - raise ValueError(f"Either specify uri_string(={uri_string}) or uri components(={uri_components})") - elif isinstance(uri_string, str): - self.uri_string = None # not required if 'uri_string' would be InitVar, see comment at definition above - uri = urlparse(uri_string) - if uri.scheme == "file": - # account for leading '/' for windows paths, e.g. '/C:/folder' - # see https://stackoverflow.com/questions/43911052/urlparse-on-a-windows-file-scheme-uri-leaves-extra-slash-at-start - path = pathlib.Path(url2pathname(uri.path)).as_posix() - else: - path = uri.path - - self.scheme = uri.scheme - self.authority = uri.netloc - self.path = path - self.query = uri.query - self.fragment = uri.fragment - else: - raise TypeError(uri_string) - - if not self.scheme: - raise ValueError("Empty URI scheme component") - elif len(self.scheme) == 1: - raise ValueError(f"Invalid URI scheme of len 1: {self.scheme}") # fail for windows paths with drive letter - - super().__post_init__() - - -@dataclass -class ResourceDescription(RawNode): - """Bare minimum for resource description nodes usable with the shared IO_Base class. - This is not part of any specification for the BioImage.IO Model Zoo and is, e.g., - not to be confused with the definition of the general RDF. 
- """ - - format_version: str = missing - name: str = missing - type: str = missing - version: Union[_Missing, packaging.version.Version] = missing - root_path: Union[pathlib.Path, URI] = pathlib.Path() # note: `root_path` is not officially part of the spec, - # but any RDF has it as it is the folder containing the rdf.yaml - - -@dataclass -class Dependencies(RawNode): - _include_in_package = ("file",) - - manager: str = missing - file: Union[URI, pathlib.Path] = missing - - def __str__(self): - return f"{self.manager}:{self.file}" - - -@dataclass -class ParametrizedInputShape(RawNode): - min: List[int] = missing - step: List[int] = missing - - def __len__(self): - return len(self.min) - - -@dataclass -class ImplicitOutputShape(RawNode): - reference_tensor: str = missing - scale: List[Union[float, None]] = missing - offset: List[float] = missing - - def __len__(self): - return len(self.scale) - - -@dataclass -class ImportableModule(RawNode): - module_name: str = missing - callable_name: str = missing - - def __str__(self): - return f"{self.module_name}:{self.callable_name}" - - -@dataclass -class LocalImportableModule(ImportableModule): - """intermediate between raw_nodes.ImportableModule and core.resource_io.nodes.ImportedSource. - - Used by SourceNodeTransformer - """ - - root_path: pathlib.Path = missing - - -@dataclass -class ImportableSourceFile(RawNode): - _include_in_package = ("source_file",) - - callable_name: str = missing - source_file: Union[URI, pathlib.Path] = missing - - def __str__(self): - return f"{self.source_file}:{self.callable_name}" - - -@dataclass -class ResolvedImportableSourceFile(ImportableSourceFile): - """intermediate between raw_nodes.ImportableSourceFile and core.resource_io.nodes.ImportedSource. - - Used by SourceNodeTransformer - """ - - source_file: pathlib.Path = missing - - -ImportableSource = Union[ImportableModule, ImportableSourceFile, ResolvedImportableSourceFile, LocalImportableModule] diff --git a/bioimageio/spec/shared/schema.py b/bioimageio/spec/shared/schema.py deleted file mode 100644 index 19e28410c..000000000 --- a/bioimageio/spec/shared/schema.py +++ /dev/null @@ -1,143 +0,0 @@ -import warnings -from types import ModuleType -from typing import ClassVar, List - -from marshmallow import INCLUDE, Schema, ValidationError, post_dump, post_load, validates, validates_schema - -from bioimageio.spec.shared import fields -from . import raw_nodes -from .common import ValidationWarning - - -class SharedBioImageIOSchema(Schema): - raw_nodes: ClassVar[ModuleType] = raw_nodes # to be overwritten in subclass by version specific raw_nodes module - short_bioimageio_description: ClassVar[str] = "" - bioimageio_description: ClassVar[str] = "" - - @post_load - def make_object(self, data, **kwargs): - if data is None: - return None - - this_type = getattr(self.raw_nodes, self.__class__.__name__, None) - if this_type is None: - # attempt import from shared raw nodes - this_type = getattr(raw_nodes, self.__class__.__name__, None) - if this_type is None: - raise NotImplementedError( - f"neither {self.raw_nodes} nor {raw_nodes} has attribute {self.__class__.__name__}." 
- ) - - try: - return this_type(**data) - except TypeError as e: - e.args += (f"when initializing {this_type} from {self}",) - raise e - - def warn(self, field: str, msg: str): - """warn about a field with a ValidationWarning""" - # simple_field_name = field.split("[")[0] # field may include [idx] - # field_instance = self.fields[simple_field_name] # todo: account for : - assert ": " not in field - # todo: add spec trail to field - # e.g. something similar to field = ":".join(self.context.get("field_path", []) + [field]) - # or: ":".join(field_instance.spec_trail) - msg = f"{field}: {msg}" - warnings.warn(msg, category=ValidationWarning) - - -class SharedProcessingSchema(Schema): - """Used to generate Pre- and Postprocessing documentation. - - Define Pre-/Postprocessing operator schemas in the Preprocessing/Postprocessing schema that inherit from this class, - and they will be rendered in the documentation (scripts/generate_processing_docs.py). - - example: bioimageio.spec.model.v0_3.schema.Processing.binarize - """ - - bioimageio_description: ClassVar[str] - - -class WithUnknown(SharedBioImageIOSchema): - """allows keeping unknown fields on load and dumping them from the 'unknown' attribute of the data to serialize""" - - field_name_unknown_dict = "unknown" - - class Meta: - unknown = INCLUDE - - @post_load - def make_object(self, data, **kwargs): - obj = super().make_object(data, **kwargs) - assert hasattr( - obj, self.field_name_unknown_dict - ), f"expected raw node to have attribute {self.field_name_unknown_dict}" - return obj - - @post_dump(pass_original=True) - def keep_unknowns(self, output, orig, **kwargs): - if orig and hasattr(orig, self.field_name_unknown_dict): - out_w_unknown = fields.YamlDict()._serialize( - getattr(orig, self.field_name_unknown_dict), self.field_name_unknown_dict, self - ) - out_w_unknown.update(output) - return out_w_unknown - else: - return output - - -class Dependencies(SharedBioImageIOSchema): - manager = fields.String(bioimageio_description="Dependency manager. For example: 'conda', 'maven', or 'pip'") - file = fields.Union( - [fields.URI(), fields.Path()], - bioimageio_description="Dependency file. For example: 'environment.yaml', 'pom.xml', or 'requirements.txt'", - ) - - -class ParametrizedInputShape(SharedBioImageIOSchema): - min = fields.List( - fields.Integer(), required=True, bioimageio_description="The minimum input shape with same length as `axes`" - ) - step = fields.List( - fields.Integer(), required=True, bioimageio_description="The minimum shape change with same length as `axes`" - ) - - @validates_schema - def matching_lengths(self, data, **kwargs): - min_ = data["min"] - step = data["step"] - if min_ is None or step is None: - return - - if len(min_) != len(step): - raise ValidationError(f"'min' and 'step' have to have the same length! (min: {min_}, step: {step})") - - -class ImplicitOutputShape(SharedBioImageIOSchema): - reference_tensor = fields.String(required=True, bioimageio_description="Name of the reference tensor.") - scale = fields.List( - fields.Float(allow_none=True), - required=True, - bioimageio_description="'output_pix/input_pix' for each dimension.", - ) - offset = fields.List( - fields.Float(), required=True, bioimageio_description="Position of origin wrt input. Multiple of 0.5."
- ) - - @validates_schema - def matching_lengths(self, data, **kwargs): - scale = data["scale"] - offset = data["offset"] - if len(scale) != len(offset): - raise ValidationError(f"scale {scale} has to have same length as offset {offset}!") - # if we have an expanded dimension, make sure that its offset is not zero - if any(sc is None for sc in scale): - for sc, off in zip(scale, offset): - if sc is None and off == 0: - raise ValidationError("Offset must not be 0 for scale null") - - @validates("offset") - def double_offset_is_int(self, value: List[float]): - for v in value: - if 2 * v != int(2 * v): - raise ValidationError(f"offset {v} in {value} not a multiple of 0.5!") diff --git a/bioimageio/spec/shared/utils/__init__.py b/bioimageio/spec/shared/utils/__init__.py deleted file mode 100644 index e00fbae78..000000000 --- a/bioimageio/spec/shared/utils/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from ._docs import get_ref_url, resolve_bioimageio_descrcription, snake_case_to_camel_case -from ._various import is_valid_orcid_id diff --git a/bioimageio/spec/shared/utils/_various.py b/bioimageio/spec/shared/utils/_various.py deleted file mode 100644 index 87582ffab..000000000 --- a/bioimageio/spec/shared/utils/_various.py +++ /dev/null @@ -1,6 +0,0 @@ -def is_valid_orcid_id(orcid_id: str): - """adapted from stdnum.iso7064.mod_11_2.checksum()""" - check = 0 - for n in orcid_id: - check = (2 * check + int(10 if n == "X" else n)) % 11 - return check == 1 diff --git a/bioimageio/spec/static/licenses.json b/bioimageio/spec/static/spdx_licenses.json similarity index 78% rename from bioimageio/spec/static/licenses.json rename to bioimageio/spec/static/spdx_licenses.json index 9c7cdbc7a..25782766a 100644 --- a/bioimageio/spec/static/licenses.json +++ b/bioimageio/spec/static/spdx_licenses.json @@ -1,5766 +1,7011 @@ -{ - "licenseListVersion": "3.13", - "licenses": [ - { - "reference": "https://spdx.org/licenses/bzip2-1.0.6.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/bzip2-1.0.6.json", - "referenceNumber": 0, - "name": "bzip2 and libbzip2 License v1.0.6", - "licenseId": "bzip2-1.0.6", - "seeAlso": [ - "https://sourceware.org/git/?p\u003dbzip2.git;a\u003dblob;f\u003dLICENSE;hb\u003dbzip2-1.0.6", - "http://bzip.org/1.0.5/bzip2-manual-1.0.5.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Glulxe.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Glulxe.json", - "referenceNumber": 1, - "name": "Glulxe License", - "licenseId": "Glulxe", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Glulxe" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Parity-7.0.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Parity-7.0.0.json", - "referenceNumber": 2, - "name": "The Parity Public License 7.0.0", - "licenseId": "Parity-7.0.0", - "seeAlso": [ - "https://paritylicense.com/versions/7.0.0.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/OML.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OML.json", - "referenceNumber": 3, - "name": "Open Market License", - "licenseId": "OML", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Open_Market_License" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/UCL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/UCL-1.0.json", - "referenceNumber": 4, - 
"name": "Upstream Compatibility License v1.0", - "licenseId": "UCL-1.0", - "seeAlso": [ - "https://opensource.org/licenses/UCL-1.0" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/UPL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/UPL-1.0.json", - "referenceNumber": 5, - "name": "Universal Permissive License v1.0", - "licenseId": "UPL-1.0", - "seeAlso": [ - "https://opensource.org/licenses/UPL" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/BSD-Protection.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/BSD-Protection.json", - "referenceNumber": 6, - "name": "BSD Protection License", - "licenseId": "BSD-Protection", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/BSD_Protection_License" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/OCLC-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OCLC-2.0.json", - "referenceNumber": 7, - "name": "OCLC Research Public License 2.0", - "licenseId": "OCLC-2.0", - "seeAlso": [ - "http://www.oclc.org/research/activities/software/license/v2final.htm", - "https://opensource.org/licenses/OCLC-2.0" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/eCos-2.0.html", - "isDeprecatedLicenseId": true, - "detailsUrl": "https://spdx.org/licenses/eCos-2.0.json", - "referenceNumber": 8, - "name": "eCos license version 2.0", - "licenseId": "eCos-2.0", - "seeAlso": [ - "https://www.gnu.org/licenses/ecos-license.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Multics.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Multics.json", - "referenceNumber": 9, - "name": "Multics License", - "licenseId": "Multics", - "seeAlso": [ - "https://opensource.org/licenses/Multics" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/IPL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/IPL-1.0.json", - "referenceNumber": 10, - "name": "IBM Public License v1.0", - "licenseId": "IPL-1.0", - "seeAlso": [ - "https://opensource.org/licenses/IPL-1.0" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/IPA.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/IPA.json", - "referenceNumber": 11, - "name": "IPA Font License", - "licenseId": "IPA", - "seeAlso": [ - "https://opensource.org/licenses/IPA" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/eGenix.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/eGenix.json", - "referenceNumber": 12, - "name": "eGenix.com Public License 1.1.0", - "licenseId": "eGenix", - "seeAlso": [ - "http://www.egenix.com/products/eGenix.com-Public-License-1.1.0.pdf", - "https://fedoraproject.org/wiki/Licensing/eGenix.com_Public_License_1.1.0" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Glide.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Glide.json", - "referenceNumber": 13, - "name": "3dfx Glide License", - "licenseId": "Glide", - "seeAlso": [ - "http://www.users.on.net/~triforce/glidexp/COPYING.txt" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Entessa.html", - "isDeprecatedLicenseId": 
false, - "detailsUrl": "https://spdx.org/licenses/Entessa.json", - "referenceNumber": 14, - "name": "Entessa Public License v1.0", - "licenseId": "Entessa", - "seeAlso": [ - "https://opensource.org/licenses/Entessa" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/FSFUL.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/FSFUL.json", - "referenceNumber": 15, - "name": "FSF Unlimited License", - "licenseId": "FSFUL", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/FSF_Unlimited_License" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Nunit.html", - "isDeprecatedLicenseId": true, - "detailsUrl": "https://spdx.org/licenses/Nunit.json", - "referenceNumber": 16, - "name": "Nunit License", - "licenseId": "Nunit", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Nunit" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/MPL-2.0-no-copyleft-exception.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/MPL-2.0-no-copyleft-exception.json", - "referenceNumber": 17, - "name": "Mozilla Public License 2.0 (no copyleft exception)", - "licenseId": "MPL-2.0-no-copyleft-exception", - "seeAlso": [ - "http://www.mozilla.org/MPL/2.0/", - "https://opensource.org/licenses/MPL-2.0" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/libpng-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/libpng-2.0.json", - "referenceNumber": 18, - "name": "PNG Reference Library version 2", - "licenseId": "libpng-2.0", - "seeAlso": [ - "http://www.libpng.org/pub/png/src/libpng-LICENSE.txt" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/OLDAP-2.2.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OLDAP-2.2.1.json", - "referenceNumber": 19, - "name": "Open LDAP Public License v2.2.1", - "licenseId": "OLDAP-2.2.1", - "seeAlso": [ - "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003d4bc786f34b50aa301be6f5600f58a980070f481e" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/curl.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/curl.json", - "referenceNumber": 20, - "name": "curl License", - "licenseId": "curl", - "seeAlso": [ - "https://github.com/bagder/curl/blob/master/COPYING" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/ANTLR-PD.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/ANTLR-PD.json", - "referenceNumber": 21, - "name": "ANTLR Software Rights Notice", - "licenseId": "ANTLR-PD", - "seeAlso": [ - "http://www.antlr2.org/license.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CC-BY-SA-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-SA-2.0.json", - "referenceNumber": 22, - "name": "Creative Commons Attribution Share Alike 2.0 Generic", - "licenseId": "CC-BY-SA-2.0", - "seeAlso": [ - "https://creativecommons.org/licenses/by-sa/2.0/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/LiLiQ-P-1.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/LiLiQ-P-1.1.json", - "referenceNumber": 23, - "name": "Licence Libre du Québec – Permissive version 1.1", - "licenseId": "LiLiQ-P-1.1", - 
"seeAlso": [ - "https://forge.gouv.qc.ca/licence/fr/liliq-v1-1/", - "http://opensource.org/licenses/LiLiQ-P-1.1" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/TCP-wrappers.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/TCP-wrappers.json", - "referenceNumber": 24, - "name": "TCP Wrappers License", - "licenseId": "TCP-wrappers", - "seeAlso": [ - "http://rc.quest.com/topics/openssh/license.php#tcpwrappers" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Unicode-DFS-2016.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Unicode-DFS-2016.json", - "referenceNumber": 25, - "name": "Unicode License Agreement - Data Files and Software (2016)", - "licenseId": "Unicode-DFS-2016", - "seeAlso": [ - "http://www.unicode.org/copyright.html" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/ODbL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/ODbL-1.0.json", - "referenceNumber": 26, - "name": "Open Data Commons Open Database License v1.0", - "licenseId": "ODbL-1.0", - "seeAlso": [ - "http://www.opendatacommons.org/licenses/odbl/1.0/", - "https://opendatacommons.org/licenses/odbl/1-0/" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/LPPL-1.3a.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/LPPL-1.3a.json", - "referenceNumber": 27, - "name": "LaTeX Project Public License v1.3a", - "licenseId": "LPPL-1.3a", - "seeAlso": [ - "http://www.latex-project.org/lppl/lppl-1-3a.txt" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/CERN-OHL-1.2.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CERN-OHL-1.2.json", - "referenceNumber": 28, - "name": "CERN Open Hardware Licence v1.2", - "licenseId": "CERN-OHL-1.2", - "seeAlso": [ - "https://www.ohwr.org/project/licenses/wikis/cern-ohl-v1.2" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/ADSL.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/ADSL.json", - "referenceNumber": 29, - "name": "Amazon Digital Services License", - "licenseId": "ADSL", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/AmazonDigitalServicesLicense" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CDDL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CDDL-1.0.json", - "referenceNumber": 30, - "name": "Common Development and Distribution License 1.0", - "licenseId": "CDDL-1.0", - "seeAlso": [ - "https://opensource.org/licenses/cddl1" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/Motosoto.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Motosoto.json", - "referenceNumber": 31, - "name": "Motosoto License", - "licenseId": "Motosoto", - "seeAlso": [ - "https://opensource.org/licenses/Motosoto" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/BUSL-1.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/BUSL-1.1.json", - "referenceNumber": 32, - "name": "Business Source License 1.1", - "licenseId": "BUSL-1.1", - "seeAlso": [ - "https://mariadb.com/bsl11/" - ], - "isOsiApproved": false - }, - { - "reference": 
"https://spdx.org/licenses/OGL-UK-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OGL-UK-1.0.json", - "referenceNumber": 33, - "name": "Open Government Licence v1.0", - "licenseId": "OGL-UK-1.0", - "seeAlso": [ - "http://www.nationalarchives.gov.uk/doc/open-government-licence/version/1/" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/xinetd.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/xinetd.json", - "referenceNumber": 34, - "name": "xinetd License", - "licenseId": "xinetd", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Xinetd_License" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/Imlib2.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Imlib2.json", - "referenceNumber": 35, - "name": "Imlib2 License", - "licenseId": "Imlib2", - "seeAlso": [ - "http://trac.enlightenment.org/e/browser/trunk/imlib2/COPYING", - "https://git.enlightenment.org/legacy/imlib2.git/tree/COPYING" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/SNIA.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/SNIA.json", - "referenceNumber": 36, - "name": "SNIA Public License 1.1", - "licenseId": "SNIA", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/SNIA_Public_License" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/OGTSL.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OGTSL.json", - "referenceNumber": 37, - "name": "Open Group Test Suite License", - "licenseId": "OGTSL", - "seeAlso": [ - "http://www.opengroup.org/testing/downloads/The_Open_Group_TSL.txt", - "https://opensource.org/licenses/OGTSL" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/TMate.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/TMate.json", - "referenceNumber": 38, - "name": "TMate Open Source License", - "licenseId": "TMate", - "seeAlso": [ - "http://svnkit.com/license.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/OCCT-PL.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OCCT-PL.json", - "referenceNumber": 39, - "name": "Open CASCADE Technology Public License", - "licenseId": "OCCT-PL", - "seeAlso": [ - "http://www.opencascade.com/content/occt-public-license" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/GPL-1.0-or-later.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/GPL-1.0-or-later.json", - "referenceNumber": 40, - "name": "GNU General Public License v1.0 or later", - "licenseId": "GPL-1.0-or-later", - "seeAlso": [ - "https://www.gnu.org/licenses/old-licenses/gpl-1.0-standalone.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/YPL-1.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/YPL-1.1.json", - "referenceNumber": 41, - "name": "Yahoo! 
Public License v1.1", - "licenseId": "YPL-1.1", - "seeAlso": [ - "http://www.zimbra.com/license/yahoo_public_license_1.1.html" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/CECILL-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CECILL-2.0.json", - "referenceNumber": 42, - "name": "CeCILL Free Software License Agreement v2.0", - "licenseId": "CECILL-2.0", - "seeAlso": [ - "http://www.cecill.info/licences/Licence_CeCILL_V2-en.html" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/PHP-3.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/PHP-3.0.json", - "referenceNumber": 43, - "name": "PHP License v3.0", - "licenseId": "PHP-3.0", - "seeAlso": [ - "http://www.php.net/license/3_0.txt", - "https://opensource.org/licenses/PHP-3.0" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/BlueOak-1.0.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/BlueOak-1.0.0.json", - "referenceNumber": 44, - "name": "Blue Oak Model License 1.0.0", - "licenseId": "BlueOak-1.0.0", - "seeAlso": [ - "https://blueoakcouncil.org/license/1.0.0" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Zimbra-1.3.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Zimbra-1.3.json", - "referenceNumber": 45, - "name": "Zimbra Public License v1.3", - "licenseId": "Zimbra-1.3", - "seeAlso": [ - "http://web.archive.org/web/20100302225219/http://www.zimbra.com/license/zimbra-public-license-1-3.html" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/OGC-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OGC-1.0.json", - "referenceNumber": 46, - "name": "OGC Software License, Version 1.0", - "licenseId": "OGC-1.0", - "seeAlso": [ - "https://www.ogc.org/ogc/software/1.0" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/NASA-1.3.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/NASA-1.3.json", - "referenceNumber": 47, - "name": "NASA Open Source Agreement 1.3", - "licenseId": "NASA-1.3", - "seeAlso": [ - "http://ti.arc.nasa.gov/opensource/nosa/", - "https://opensource.org/licenses/NASA-1.3" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/SPL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/SPL-1.0.json", - "referenceNumber": 48, - "name": "Sun Public License v1.0", - "licenseId": "SPL-1.0", - "seeAlso": [ - "https://opensource.org/licenses/SPL-1.0" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/Intel-ACPI.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Intel-ACPI.json", - "referenceNumber": 49, - "name": "Intel ACPI Software License Agreement", - "licenseId": "Intel-ACPI", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Intel_ACPI_Software_License_Agreement" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/SISSL-1.2.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/SISSL-1.2.json", - "referenceNumber": 50, - "name": "Sun Industry Standards Source License v1.2", - "licenseId": "SISSL-1.2", - "seeAlso": [ - 
"http://gridscheduler.sourceforge.net/Gridengine_SISSL_license.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/OGL-Canada-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OGL-Canada-2.0.json", - "referenceNumber": 51, - "name": "Open Government Licence - Canada", - "licenseId": "OGL-Canada-2.0", - "seeAlso": [ - "https://open.canada.ca/en/open-government-licence-canada" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CC-BY-3.0-US.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-3.0-US.json", - "referenceNumber": 52, - "name": "Creative Commons Attribution 3.0 United States", - "licenseId": "CC-BY-3.0-US", - "seeAlso": [ - "https://creativecommons.org/licenses/by/3.0/us/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/copyleft-next-0.3.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/copyleft-next-0.3.1.json", - "referenceNumber": 53, - "name": "copyleft-next 0.3.1", - "licenseId": "copyleft-next-0.3.1", - "seeAlso": [ - "https://github.com/copyleft-next/copyleft-next/blob/master/Releases/copyleft-next-0.3.1" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/GFDL-1.1-invariants-or-later.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/GFDL-1.1-invariants-or-later.json", - "referenceNumber": 54, - "name": "GNU Free Documentation License v1.1 or later - invariants", - "licenseId": "GFDL-1.1-invariants-or-later", - "seeAlso": [ - "https://www.gnu.org/licenses/old-licenses/fdl-1.1.txt" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/GL2PS.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/GL2PS.json", - "referenceNumber": 55, - "name": "GL2PS License", - "licenseId": "GL2PS", - "seeAlso": [ - "http://www.geuz.org/gl2ps/COPYING.GL2PS" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/MS-PL.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/MS-PL.json", - "referenceNumber": 56, - "name": "Microsoft Public License", - "licenseId": "MS-PL", - "seeAlso": [ - "http://www.microsoft.com/opensource/licenses.mspx", - "https://opensource.org/licenses/MS-PL" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/SCEA.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/SCEA.json", - "referenceNumber": 57, - "name": "SCEA Shared Source License", - "licenseId": "SCEA", - "seeAlso": [ - "http://research.scea.com/scea_shared_source_license.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CC-BY-ND-2.5.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-ND-2.5.json", - "referenceNumber": 58, - "name": "Creative Commons Attribution No Derivatives 2.5 Generic", - "licenseId": "CC-BY-ND-2.5", - "seeAlso": [ - "https://creativecommons.org/licenses/by-nd/2.5/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/SSPL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/SSPL-1.0.json", - "referenceNumber": 59, - "name": "Server Side Public License, v 1", - "licenseId": "SSPL-1.0", - "seeAlso": [ - "https://www.mongodb.com/licensing/server-side-public-license" - ], - 
"isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Spencer-86.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Spencer-86.json", - "referenceNumber": 60, - "name": "Spencer License 86", - "licenseId": "Spencer-86", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Henry_Spencer_Reg-Ex_Library_License" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/LPPL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/LPPL-1.0.json", - "referenceNumber": 61, - "name": "LaTeX Project Public License v1.0", - "licenseId": "LPPL-1.0", - "seeAlso": [ - "http://www.latex-project.org/lppl/lppl-1-0.txt" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/GPL-3.0-only.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/GPL-3.0-only.json", - "referenceNumber": 62, - "name": "GNU General Public License v3.0 only", - "licenseId": "GPL-3.0-only", - "seeAlso": [ - "https://www.gnu.org/licenses/gpl-3.0-standalone.html", - "https://opensource.org/licenses/GPL-3.0" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/GPL-2.0-with-autoconf-exception.html", - "isDeprecatedLicenseId": true, - "detailsUrl": "https://spdx.org/licenses/GPL-2.0-with-autoconf-exception.json", - "referenceNumber": 63, - "name": "GNU General Public License v2.0 w/Autoconf exception", - "licenseId": "GPL-2.0-with-autoconf-exception", - "seeAlso": [ - "http://ac-archive.sourceforge.net/doc/copyright.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Giftware.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Giftware.json", - "referenceNumber": 64, - "name": "Giftware License", - "licenseId": "Giftware", - "seeAlso": [ - "http://liballeg.org/license.html#allegro-4-the-giftware-license" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CC-BY-NC-ND-3.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-ND-3.0.json", - "referenceNumber": 65, - "name": "Creative Commons Attribution Non Commercial No Derivatives 3.0 Unported", - "licenseId": "CC-BY-NC-ND-3.0", - "seeAlso": [ - "https://creativecommons.org/licenses/by-nc-nd/3.0/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CNRI-Python.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CNRI-Python.json", - "referenceNumber": 66, - "name": "CNRI Python License", - "licenseId": "CNRI-Python", - "seeAlso": [ - "https://opensource.org/licenses/CNRI-Python" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/GFDL-1.2-no-invariants-or-later.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/GFDL-1.2-no-invariants-or-later.json", - "referenceNumber": 67, - "name": "GNU Free Documentation License v1.2 or later - no invariants", - "licenseId": "GFDL-1.2-no-invariants-or-later", - "seeAlso": [ - "https://www.gnu.org/licenses/old-licenses/fdl-1.2.txt" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Afmparse.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Afmparse.json", - "referenceNumber": 68, - "name": "Afmparse License", - "licenseId": "Afmparse", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Afmparse" - ], - 
"isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/BSD-3-Clause-LBNL.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/BSD-3-Clause-LBNL.json", - "referenceNumber": 69, - "name": "Lawrence Berkeley National Labs BSD variant license", - "licenseId": "BSD-3-Clause-LBNL", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/LBNLBSD" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/NCGL-UK-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/NCGL-UK-2.0.json", - "referenceNumber": 70, - "name": "Non-Commercial Government Licence", - "licenseId": "NCGL-UK-2.0", - "seeAlso": [ - "http://www.nationalarchives.gov.uk/doc/non-commercial-government-licence/version/2/" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/GPL-1.0+.html", - "isDeprecatedLicenseId": true, - "detailsUrl": "https://spdx.org/licenses/GPL-1.0+.json", - "referenceNumber": 71, - "name": "GNU General Public License v1.0 or later", - "licenseId": "GPL-1.0+", - "seeAlso": [ - "https://www.gnu.org/licenses/old-licenses/gpl-1.0-standalone.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/PHP-3.01.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/PHP-3.01.json", - "referenceNumber": 72, - "name": "PHP License v3.01", - "licenseId": "PHP-3.01", - "seeAlso": [ - "http://www.php.net/license/3_01.txt" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/Leptonica.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Leptonica.json", - "referenceNumber": 73, - "name": "Leptonica License", - "licenseId": "Leptonica", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Leptonica" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/bzip2-1.0.5.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/bzip2-1.0.5.json", - "referenceNumber": 74, - "name": "bzip2 and libbzip2 License v1.0.5", - "licenseId": "bzip2-1.0.5", - "seeAlso": [ - "https://sourceware.org/bzip2/1.0.5/bzip2-manual-1.0.5.html", - "http://bzip.org/1.0.5/bzip2-manual-1.0.5.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/NIST-PD-fallback.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/NIST-PD-fallback.json", - "referenceNumber": 75, - "name": "NIST Public Domain Notice with license fallback", - "licenseId": "NIST-PD-fallback", - "seeAlso": [ - "https://github.com/usnistgov/jsip/blob/59700e6926cbe96c5cdae897d9a7d2656b42abe3/LICENSE", - "https://github.com/usnistgov/fipy/blob/86aaa5c2ba2c6f1be19593c5986071cf6568cc34/LICENSE.rst" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/OSL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OSL-1.0.json", - "referenceNumber": 76, - "name": "Open Software License 1.0", - "licenseId": "OSL-1.0", - "seeAlso": [ - "https://opensource.org/licenses/OSL-1.0" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/OFL-1.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OFL-1.1.json", - "referenceNumber": 77, - "name": "SIL Open Font License 1.1", - "licenseId": "OFL-1.1", - "seeAlso": [ - "http://scripts.sil.org/cms/scripts/page.php?item_id\u003dOFL_web", - 
"https://opensource.org/licenses/OFL-1.1" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/JasPer-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/JasPer-2.0.json", - "referenceNumber": 78, - "name": "JasPer License", - "licenseId": "JasPer-2.0", - "seeAlso": [ - "http://www.ece.uvic.ca/~mdadams/jasper/LICENSE" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Naumen.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Naumen.json", - "referenceNumber": 79, - "name": "Naumen Public License", - "licenseId": "Naumen", - "seeAlso": [ - "https://opensource.org/licenses/Naumen" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/AGPL-1.0-only.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/AGPL-1.0-only.json", - "referenceNumber": 80, - "name": "Affero General Public License v1.0 only", - "licenseId": "AGPL-1.0-only", - "seeAlso": [ - "http://www.affero.org/oagpl.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/C-UDA-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/C-UDA-1.0.json", - "referenceNumber": 81, - "name": "Computational Use of Data Agreement v1.0", - "licenseId": "C-UDA-1.0", - "seeAlso": [ - "https://github.com/microsoft/Computational-Use-of-Data-Agreement/blob/master/C-UDA-1.0.md", - "https://cdla.dev/computational-use-of-data-agreement-v1-0/" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/MIT.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/MIT.json", - "referenceNumber": 82, - "name": "MIT License", - "licenseId": "MIT", - "seeAlso": [ - "https://opensource.org/licenses/MIT" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/TCL.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/TCL.json", - "referenceNumber": 83, - "name": "TCL/TK License", - "licenseId": "TCL", - "seeAlso": [ - "http://www.tcl.tk/software/tcltk/license.html", - "https://fedoraproject.org/wiki/Licensing/TCL" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/LGPL-3.0-only.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/LGPL-3.0-only.json", - "referenceNumber": 84, - "name": "GNU Lesser General Public License v3.0 only", - "licenseId": "LGPL-3.0-only", - "seeAlso": [ - "https://www.gnu.org/licenses/lgpl-3.0-standalone.html", - "https://opensource.org/licenses/LGPL-3.0" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/ECL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/ECL-1.0.json", - "referenceNumber": 85, - "name": "Educational Community License v1.0", - "licenseId": "ECL-1.0", - "seeAlso": [ - "https://opensource.org/licenses/ECL-1.0" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/MPL-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/MPL-2.0.json", - "referenceNumber": 86, - "name": "Mozilla Public License 2.0", - "licenseId": "MPL-2.0", - "seeAlso": [ - "http://www.mozilla.org/MPL/2.0/", - "https://opensource.org/licenses/MPL-2.0" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": 
"https://spdx.org/licenses/CC-BY-NC-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-1.0.json", - "referenceNumber": 87, - "name": "Creative Commons Attribution Non Commercial 1.0 Generic", - "licenseId": "CC-BY-NC-1.0", - "seeAlso": [ - "https://creativecommons.org/licenses/by-nc/1.0/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CC-BY-NC-ND-2.5.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-ND-2.5.json", - "referenceNumber": 88, - "name": "Creative Commons Attribution Non Commercial No Derivatives 2.5 Generic", - "licenseId": "CC-BY-NC-ND-2.5", - "seeAlso": [ - "https://creativecommons.org/licenses/by-nc-nd/2.5/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/LPPL-1.3c.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/LPPL-1.3c.json", - "referenceNumber": 89, - "name": "LaTeX Project Public License v1.3c", - "licenseId": "LPPL-1.3c", - "seeAlso": [ - "http://www.latex-project.org/lppl/lppl-1-3c.txt", - "https://opensource.org/licenses/LPPL-1.3c" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/JSON.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/JSON.json", - "referenceNumber": 90, - "name": "JSON License", - "licenseId": "JSON", - "seeAlso": [ - "http://www.json.org/license.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/NBPL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/NBPL-1.0.json", - "referenceNumber": 91, - "name": "Net Boolean Public License v1", - "licenseId": "NBPL-1.0", - "seeAlso": [ - "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003d37b4b3f6cc4bf34e1d3dec61e69914b9819d8894" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CAL-1.0-Combined-Work-Exception.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CAL-1.0-Combined-Work-Exception.json", - "referenceNumber": 92, - "name": "Cryptographic Autonomy License 1.0 (Combined Work Exception)", - "licenseId": "CAL-1.0-Combined-Work-Exception", - "seeAlso": [ - "http://cryptographicautonomylicense.com/license-text.html", - "https://opensource.org/licenses/CAL-1.0" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/Unlicense.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Unlicense.json", - "referenceNumber": 93, - "name": "The Unlicense", - "licenseId": "Unlicense", - "seeAlso": [ - "https://unlicense.org/" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/CNRI-Python-GPL-Compatible.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CNRI-Python-GPL-Compatible.json", - "referenceNumber": 94, - "name": "CNRI Python Open Source GPL Compatible License Agreement", - "licenseId": "CNRI-Python-GPL-Compatible", - "seeAlso": [ - "http://www.python.org/download/releases/1.6.1/download_win/" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/TU-Berlin-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/TU-Berlin-2.0.json", - "referenceNumber": 95, - "name": "Technische Universitaet Berlin License 2.0", - "licenseId": "TU-Berlin-2.0", - "seeAlso": [ - 
"https://github.com/CorsixTH/deps/blob/fd339a9f526d1d9c9f01ccf39e438a015da50035/licences/libgsm.txt" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/NLPL.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/NLPL.json", - "referenceNumber": 96, - "name": "No Limit Public License", - "licenseId": "NLPL", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/NLPL" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/LGPL-3.0-or-later.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/LGPL-3.0-or-later.json", - "referenceNumber": 97, - "name": "GNU Lesser General Public License v3.0 or later", - "licenseId": "LGPL-3.0-or-later", - "seeAlso": [ - "https://www.gnu.org/licenses/lgpl-3.0-standalone.html", - "https://opensource.org/licenses/LGPL-3.0" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/Beerware.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Beerware.json", - "referenceNumber": 98, - "name": "Beerware License", - "licenseId": "Beerware", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Beerware", - "https://people.freebsd.org/~phk/" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/NGPL.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/NGPL.json", - "referenceNumber": 99, - "name": "Nethack General Public License", - "licenseId": "NGPL", - "seeAlso": [ - "https://opensource.org/licenses/NGPL" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/ZPL-2.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/ZPL-2.1.json", - "referenceNumber": 100, - "name": "Zope Public License 2.1", - "licenseId": "ZPL-2.1", - "seeAlso": [ - "http://old.zope.org/Resources/ZPL/" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/Saxpath.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Saxpath.json", - "referenceNumber": 101, - "name": "Saxpath License", - "licenseId": "Saxpath", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Saxpath_License" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CC-BY-SA-2.0-UK.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-SA-2.0-UK.json", - "referenceNumber": 102, - "name": "Creative Commons Attribution Share Alike 2.0 England and Wales", - "licenseId": "CC-BY-SA-2.0-UK", - "seeAlso": [ - "https://creativecommons.org/licenses/by-sa/2.0/uk/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CECILL-2.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CECILL-2.1.json", - "referenceNumber": 103, - "name": "CeCILL Free Software License Agreement v2.1", - "licenseId": "CECILL-2.1", - "seeAlso": [ - "http://www.cecill.info/licences/Licence_CeCILL_V2.1-en.html" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/XFree86-1.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/XFree86-1.1.json", - "referenceNumber": 104, - "name": "XFree86 License 1.1", - "licenseId": "XFree86-1.1", - "seeAlso": [ - "http://www.xfree86.org/current/LICENSE4.html" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": 
"https://spdx.org/licenses/IBM-pibs.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/IBM-pibs.json", - "referenceNumber": 105, - "name": "IBM PowerPC Initialization and Boot Software", - "licenseId": "IBM-pibs", - "seeAlso": [ - "http://git.denx.de/?p\u003du-boot.git;a\u003dblob;f\u003darch/powerpc/cpu/ppc4xx/miiphy.c;h\u003d297155fdafa064b955e53e9832de93bfb0cfb85b;hb\u003d9fab4bf4cc077c21e43941866f3f2c196f28670d" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Zlib.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Zlib.json", - "referenceNumber": 106, - "name": "zlib License", - "licenseId": "Zlib", - "seeAlso": [ - "http://www.zlib.net/zlib_license.html", - "https://opensource.org/licenses/Zlib" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/StandardML-NJ.html", - "isDeprecatedLicenseId": true, - "detailsUrl": "https://spdx.org/licenses/StandardML-NJ.json", - "referenceNumber": 107, - "name": "Standard ML of New Jersey License", - "licenseId": "StandardML-NJ", - "seeAlso": [ - "http://www.smlnj.org//license.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/RPSL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/RPSL-1.0.json", - "referenceNumber": 108, - "name": "RealNetworks Public Source License v1.0", - "licenseId": "RPSL-1.0", - "seeAlso": [ - "https://helixcommunity.org/content/rpsl", - "https://opensource.org/licenses/RPSL-1.0" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/CECILL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CECILL-1.0.json", - "referenceNumber": 109, - "name": "CeCILL Free Software License Agreement v1.0", - "licenseId": "CECILL-1.0", - "seeAlso": [ - "http://www.cecill.info/licences/Licence_CeCILL_V1-fr.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/OGL-UK-3.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OGL-UK-3.0.json", - "referenceNumber": 110, - "name": "Open Government Licence v3.0", - "licenseId": "OGL-UK-3.0", - "seeAlso": [ - "http://www.nationalarchives.gov.uk/doc/open-government-licence/version/3/" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/BSD-4-Clause-Shortened.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/BSD-4-Clause-Shortened.json", - "referenceNumber": 111, - "name": "BSD 4 Clause Shortened", - "licenseId": "BSD-4-Clause-Shortened", - "seeAlso": [ - "https://metadata.ftp-master.debian.org/changelogs//main/a/arpwatch/arpwatch_2.1a15-7_copyright" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Watcom-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Watcom-1.0.json", - "referenceNumber": 112, - "name": "Sybase Open Watcom Public License 1.0", - "licenseId": "Watcom-1.0", - "seeAlso": [ - "https://opensource.org/licenses/Watcom-1.0" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/Wsuipa.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Wsuipa.json", - "referenceNumber": 113, - "name": "Wsuipa License", - "licenseId": "Wsuipa", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Wsuipa" - ], - "isOsiApproved": false - }, - { - 
"reference": "https://spdx.org/licenses/TU-Berlin-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/TU-Berlin-1.0.json", - "referenceNumber": 114, - "name": "Technische Universitaet Berlin License 1.0", - "licenseId": "TU-Berlin-1.0", - "seeAlso": [ - "https://github.com/swh/ladspa/blob/7bf6f3799fdba70fda297c2d8fd9f526803d9680/gsm/COPYRIGHT" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Latex2e.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Latex2e.json", - "referenceNumber": 115, - "name": "Latex2e License", - "licenseId": "Latex2e", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Latex2e" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CECILL-B.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CECILL-B.json", - "referenceNumber": 116, - "name": "CeCILL-B Free Software License Agreement", - "licenseId": "CECILL-B", - "seeAlso": [ - "http://www.cecill.info/licences/Licence_CeCILL-B_V1-en.html" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/EUPL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/EUPL-1.0.json", - "referenceNumber": 117, - "name": "European Union Public License 1.0", - "licenseId": "EUPL-1.0", - "seeAlso": [ - "http://ec.europa.eu/idabc/en/document/7330.html", - "http://ec.europa.eu/idabc/servlets/Doc027f.pdf?id\u003d31096" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/GFDL-1.2-or-later.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/GFDL-1.2-or-later.json", - "referenceNumber": 118, - "name": "GNU Free Documentation License v1.2 or later", - "licenseId": "GFDL-1.2-or-later", - "seeAlso": [ - "https://www.gnu.org/licenses/old-licenses/fdl-1.2.txt" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/CPL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CPL-1.0.json", - "referenceNumber": 119, - "name": "Common Public License 1.0", - "licenseId": "CPL-1.0", - "seeAlso": [ - "https://opensource.org/licenses/CPL-1.0" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/CC-BY-ND-3.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-ND-3.0.json", - "referenceNumber": 120, - "name": "Creative Commons Attribution No Derivatives 3.0 Unported", - "licenseId": "CC-BY-ND-3.0", - "seeAlso": [ - "https://creativecommons.org/licenses/by-nd/3.0/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/NTP.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/NTP.json", - "referenceNumber": 121, - "name": "NTP License", - "licenseId": "NTP", - "seeAlso": [ - "https://opensource.org/licenses/NTP" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/W3C-19980720.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/W3C-19980720.json", - "referenceNumber": 122, - "name": "W3C Software Notice and License (1998-07-20)", - "licenseId": "W3C-19980720", - "seeAlso": [ - "http://www.w3.org/Consortium/Legal/copyright-software-19980720.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/GFDL-1.3-only.html", - 
"isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/GFDL-1.3-only.json", - "referenceNumber": 123, - "name": "GNU Free Documentation License v1.3 only", - "licenseId": "GFDL-1.3-only", - "seeAlso": [ - "https://www.gnu.org/licenses/fdl-1.3.txt" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/CC-BY-SA-4.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-SA-4.0.json", - "referenceNumber": 124, - "name": "Creative Commons Attribution Share Alike 4.0 International", - "licenseId": "CC-BY-SA-4.0", - "seeAlso": [ - "https://creativecommons.org/licenses/by-sa/4.0/legalcode" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/EUPL-1.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/EUPL-1.1.json", - "referenceNumber": 125, - "name": "European Union Public License 1.1", - "licenseId": "EUPL-1.1", - "seeAlso": [ - "https://joinup.ec.europa.eu/software/page/eupl/licence-eupl", - "https://joinup.ec.europa.eu/sites/default/files/custom-page/attachment/eupl1.1.-licence-en_0.pdf", - "https://opensource.org/licenses/EUPL-1.1" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/GFDL-1.1-no-invariants-only.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/GFDL-1.1-no-invariants-only.json", - "referenceNumber": 126, - "name": "GNU Free Documentation License v1.1 only - no invariants", - "licenseId": "GFDL-1.1-no-invariants-only", - "seeAlso": [ - "https://www.gnu.org/licenses/old-licenses/fdl-1.1.txt" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/JPNIC.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/JPNIC.json", - "referenceNumber": 127, - "name": "Japan Network Information Center License", - "licenseId": "JPNIC", - "seeAlso": [ - "https://gitlab.isc.org/isc-projects/bind9/blob/master/COPYRIGHT#L366" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/AMPAS.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/AMPAS.json", - "referenceNumber": 128, - "name": "Academy of Motion Picture Arts and Sciences BSD", - "licenseId": "AMPAS", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/BSD#AMPASBSD" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/BSD-3-Clause.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/BSD-3-Clause.json", - "referenceNumber": 129, - "name": "BSD 3-Clause \"New\" or \"Revised\" License", - "licenseId": "BSD-3-Clause", - "seeAlso": [ - "https://opensource.org/licenses/BSD-3-Clause" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/MIT-0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/MIT-0.json", - "referenceNumber": 130, - "name": "MIT No Attribution", - "licenseId": "MIT-0", - "seeAlso": [ - "https://github.com/aws/mit-0", - "https://romanrm.net/mit-zero", - "https://github.com/awsdocs/aws-cloud9-user-guide/blob/master/LICENSE-SAMPLECODE" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/Intel.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Intel.json", - "referenceNumber": 131, - "name": "Intel Open Source License", - "licenseId": "Intel", - "seeAlso": 
[ - "https://opensource.org/licenses/Intel" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/O-UDA-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/O-UDA-1.0.json", - "referenceNumber": 132, - "name": "Open Use of Data Agreement v1.0", - "licenseId": "O-UDA-1.0", - "seeAlso": [ - "https://github.com/microsoft/Open-Use-of-Data-Agreement/blob/v1.0/O-UDA-1.0.md", - "https://cdla.dev/open-use-of-data-agreement-v1-0/" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/NPL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/NPL-1.0.json", - "referenceNumber": 133, - "name": "Netscape Public License v1.0", - "licenseId": "NPL-1.0", - "seeAlso": [ - "http://www.mozilla.org/MPL/NPL/1.0/" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/CC-BY-NC-2.5.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-2.5.json", - "referenceNumber": 134, - "name": "Creative Commons Attribution Non Commercial 2.5 Generic", - "licenseId": "CC-BY-NC-2.5", - "seeAlso": [ - "https://creativecommons.org/licenses/by-nc/2.5/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Mup.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Mup.json", - "referenceNumber": 135, - "name": "Mup License", - "licenseId": "Mup", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Mup" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Newsletr.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Newsletr.json", - "referenceNumber": 136, - "name": "Newsletr License", - "licenseId": "Newsletr", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Newsletr" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/PDDL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/PDDL-1.0.json", - "referenceNumber": 137, - "name": "Open Data Commons Public Domain Dedication \u0026 License 1.0", - "licenseId": "PDDL-1.0", - "seeAlso": [ - "http://opendatacommons.org/licenses/pddl/1.0/", - "https://opendatacommons.org/licenses/pddl/" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/SMLNJ.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/SMLNJ.json", - "referenceNumber": 138, - "name": "Standard ML of New Jersey License", - "licenseId": "SMLNJ", - "seeAlso": [ - "https://www.smlnj.org/license.html" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/BSD-1-Clause.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/BSD-1-Clause.json", - "referenceNumber": 139, - "name": "BSD 1-Clause License", - "licenseId": "BSD-1-Clause", - "seeAlso": [ - "https://svnweb.freebsd.org/base/head/include/ifaddrs.h?revision\u003d326823" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/SimPL-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/SimPL-2.0.json", - "referenceNumber": 140, - "name": "Simple Public License 2.0", - "licenseId": "SimPL-2.0", - "seeAlso": [ - "https://opensource.org/licenses/SimPL-2.0" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/OLDAP-1.2.html", - 
"isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OLDAP-1.2.json", - "referenceNumber": 141, - "name": "Open LDAP Public License v1.2", - "licenseId": "OLDAP-1.2", - "seeAlso": [ - "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003d42b0383c50c299977b5893ee695cf4e486fb0dc7" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Xnet.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Xnet.json", - "referenceNumber": 142, - "name": "X.Net License", - "licenseId": "Xnet", - "seeAlso": [ - "https://opensource.org/licenses/Xnet" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/BSD-2-Clause.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/BSD-2-Clause.json", - "referenceNumber": 143, - "name": "BSD 2-Clause \"Simplified\" License", - "licenseId": "BSD-2-Clause", - "seeAlso": [ - "https://opensource.org/licenses/BSD-2-Clause" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/AML.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/AML.json", - "referenceNumber": 144, - "name": "Apple MIT License", - "licenseId": "AML", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Apple_MIT_License" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/GFDL-1.2-only.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/GFDL-1.2-only.json", - "referenceNumber": 145, - "name": "GNU Free Documentation License v1.2 only", - "licenseId": "GFDL-1.2-only", - "seeAlso": [ - "https://www.gnu.org/licenses/old-licenses/fdl-1.2.txt" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/Info-ZIP.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Info-ZIP.json", - "referenceNumber": 146, - "name": "Info-ZIP License", - "licenseId": "Info-ZIP", - "seeAlso": [ - "http://www.info-zip.org/license.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/DSDP.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/DSDP.json", - "referenceNumber": 147, - "name": "DSDP License", - "licenseId": "DSDP", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/DSDP" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/AGPL-1.0.html", - "isDeprecatedLicenseId": true, - "detailsUrl": "https://spdx.org/licenses/AGPL-1.0.json", - "referenceNumber": 148, - "name": "Affero General Public License v1.0", - "licenseId": "AGPL-1.0", - "seeAlso": [ - "http://www.affero.org/oagpl.html" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/BSD-4-Clause-UC.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/BSD-4-Clause-UC.json", - "referenceNumber": 149, - "name": "BSD-4-Clause (University of California-Specific)", - "licenseId": "BSD-4-Clause-UC", - "seeAlso": [ - "http://www.freebsd.org/copyright/license.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/LGPL-2.1-only.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/LGPL-2.1-only.json", - "referenceNumber": 150, - "name": "GNU Lesser General Public License v2.1 only", - "licenseId": "LGPL-2.1-only", - "seeAlso": [ - 
"https://www.gnu.org/licenses/old-licenses/lgpl-2.1-standalone.html", - "https://opensource.org/licenses/LGPL-2.1" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/OFL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OFL-1.0.json", - "referenceNumber": 151, - "name": "SIL Open Font License 1.0", - "licenseId": "OFL-1.0", - "seeAlso": [ - "http://scripts.sil.org/cms/scripts/page.php?item_id\u003dOFL10_web" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CDL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CDL-1.0.json", - "referenceNumber": 152, - "name": "Common Documentation License 1.0", - "licenseId": "CDL-1.0", - "seeAlso": [ - "http://www.opensource.apple.com/cdl/", - "https://fedoraproject.org/wiki/Licensing/Common_Documentation_License", - "https://www.gnu.org/licenses/license-list.html#ACDL" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/LAL-1.3.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/LAL-1.3.json", - "referenceNumber": 153, - "name": "Licence Art Libre 1.3", - "licenseId": "LAL-1.3", - "seeAlso": [ - "https://artlibre.org/" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Sendmail.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Sendmail.json", - "referenceNumber": 154, - "name": "Sendmail License", - "licenseId": "Sendmail", - "seeAlso": [ - "http://www.sendmail.com/pdfs/open_source/sendmail_license.pdf", - "https://web.archive.org/web/20160322142305/https://www.sendmail.com/pdfs/open_source/sendmail_license.pdf" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/OGDL-Taiwan-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OGDL-Taiwan-1.0.json", - "referenceNumber": 155, - "name": "Taiwan Open Government Data License, version 1.0", - "licenseId": "OGDL-Taiwan-1.0", - "seeAlso": [ - "https://data.gov.tw/license" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Zimbra-1.4.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Zimbra-1.4.json", - "referenceNumber": 156, - "name": "Zimbra Public License v1.4", - "licenseId": "Zimbra-1.4", - "seeAlso": [ - "http://www.zimbra.com/legal/zimbra-public-license-1-4" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Borceux.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Borceux.json", - "referenceNumber": 157, - "name": "Borceux license", - "licenseId": "Borceux", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Borceux" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/OSL-3.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OSL-3.0.json", - "referenceNumber": 158, - "name": "Open Software License 3.0", - "licenseId": "OSL-3.0", - "seeAlso": [ - "https://web.archive.org/web/20120101081418/http://rosenlaw.com:80/OSL3.0.htm", - "https://opensource.org/licenses/OSL-3.0" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/AMDPLPA.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/AMDPLPA.json", - "referenceNumber": 159, - "name": "AMD\u0027s plpa_map.c License", - "licenseId": "AMDPLPA", - 
"seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/AMD_plpa_map_License" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CC-BY-NC-SA-3.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-SA-3.0.json", - "referenceNumber": 160, - "name": "Creative Commons Attribution Non Commercial Share Alike 3.0 Unported", - "licenseId": "CC-BY-NC-SA-3.0", - "seeAlso": [ - "https://creativecommons.org/licenses/by-nc-sa/3.0/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/OLDAP-2.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OLDAP-2.1.json", - "referenceNumber": 161, - "name": "Open LDAP Public License v2.1", - "licenseId": "OLDAP-2.1", - "seeAlso": [ - "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003db0d176738e96a0d3b9f85cb51e140a86f21be715" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/BSD-2-Clause-FreeBSD.html", - "isDeprecatedLicenseId": true, - "detailsUrl": "https://spdx.org/licenses/BSD-2-Clause-FreeBSD.json", - "referenceNumber": 162, - "name": "BSD 2-Clause FreeBSD License", - "licenseId": "BSD-2-Clause-FreeBSD", - "seeAlso": [ - "http://www.freebsd.org/copyright/freebsd-license.html" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/CPOL-1.02.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CPOL-1.02.json", - "referenceNumber": 163, - "name": "Code Project Open License 1.02", - "licenseId": "CPOL-1.02", - "seeAlso": [ - "http://www.codeproject.com/info/cpol10.aspx" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/MPL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/MPL-1.0.json", - "referenceNumber": 164, - "name": "Mozilla Public License 1.0", - "licenseId": "MPL-1.0", - "seeAlso": [ - "http://www.mozilla.org/MPL/MPL-1.0.html", - "https://opensource.org/licenses/MPL-1.0" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/blessing.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/blessing.json", - "referenceNumber": 165, - "name": "SQLite Blessing", - "licenseId": "blessing", - "seeAlso": [ - "https://www.sqlite.org/src/artifact/e33a4df7e32d742a?ln\u003d4-9", - "https://sqlite.org/src/artifact/df5091916dbb40e6" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Parity-6.0.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Parity-6.0.0.json", - "referenceNumber": 166, - "name": "The Parity Public License 6.0.0", - "licenseId": "Parity-6.0.0", - "seeAlso": [ - "https://paritylicense.com/versions/6.0.0.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/AFL-3.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/AFL-3.0.json", - "referenceNumber": 167, - "name": "Academic Free License v3.0", - "licenseId": "AFL-3.0", - "seeAlso": [ - "http://www.rosenlaw.com/AFL3.0.htm", - "https://opensource.org/licenses/afl-3.0" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/SGI-B-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/SGI-B-1.0.json", - "referenceNumber": 168, - "name": "SGI Free Software License B v1.0", - "licenseId": 
"SGI-B-1.0", - "seeAlso": [ - "http://oss.sgi.com/projects/FreeB/SGIFreeSWLicB.1.0.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/BSD-2-Clause-Patent.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/BSD-2-Clause-Patent.json", - "referenceNumber": 169, - "name": "BSD-2-Clause Plus Patent License", - "licenseId": "BSD-2-Clause-Patent", - "seeAlso": [ - "https://opensource.org/licenses/BSDplusPatent" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/Artistic-1.0-cl8.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Artistic-1.0-cl8.json", - "referenceNumber": 170, - "name": "Artistic License 1.0 w/clause 8", - "licenseId": "Artistic-1.0-cl8", - "seeAlso": [ - "https://opensource.org/licenses/Artistic-1.0" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/CC-BY-NC-ND-4.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-ND-4.0.json", - "referenceNumber": 171, - "name": "Creative Commons Attribution Non Commercial No Derivatives 4.0 International", - "licenseId": "CC-BY-NC-ND-4.0", - "seeAlso": [ - "https://creativecommons.org/licenses/by-nc-nd/4.0/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Apache-1.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Apache-1.1.json", - "referenceNumber": 172, - "name": "Apache License 1.1", - "licenseId": "Apache-1.1", - "seeAlso": [ - "http://apache.org/licenses/LICENSE-1.1", - "https://opensource.org/licenses/Apache-1.1" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/ErlPL-1.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/ErlPL-1.1.json", - "referenceNumber": 173, - "name": "Erlang Public License v1.1", - "licenseId": "ErlPL-1.1", - "seeAlso": [ - "http://www.erlang.org/EPLICENSE" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/OFL-1.0-RFN.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OFL-1.0-RFN.json", - "referenceNumber": 174, - "name": "SIL Open Font License 1.0 with Reserved Font Name", - "licenseId": "OFL-1.0-RFN", - "seeAlso": [ - "http://scripts.sil.org/cms/scripts/page.php?item_id\u003dOFL10_web" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CC-BY-NC-3.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-3.0.json", - "referenceNumber": 175, - "name": "Creative Commons Attribution Non Commercial 3.0 Unported", - "licenseId": "CC-BY-NC-3.0", - "seeAlso": [ - "https://creativecommons.org/licenses/by-nc/3.0/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CC-BY-NC-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-2.0.json", - "referenceNumber": 176, - "name": "Creative Commons Attribution Non Commercial 2.0 Generic", - "licenseId": "CC-BY-NC-2.0", - "seeAlso": [ - "https://creativecommons.org/licenses/by-nc/2.0/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/MakeIndex.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/MakeIndex.json", - "referenceNumber": 177, - "name": "MakeIndex License", - "licenseId": "MakeIndex", - "seeAlso": [ - 
"https://fedoraproject.org/wiki/Licensing/MakeIndex" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Barr.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Barr.json", - "referenceNumber": 178, - "name": "Barr License", - "licenseId": "Barr", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Barr" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CC-BY-SA-2.1-JP.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-SA-2.1-JP.json", - "referenceNumber": 179, - "name": "Creative Commons Attribution Share Alike 2.1 Japan", - "licenseId": "CC-BY-SA-2.1-JP", - "seeAlso": [ - "https://creativecommons.org/licenses/by-sa/2.1/jp/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/GFDL-1.2-no-invariants-only.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/GFDL-1.2-no-invariants-only.json", - "referenceNumber": 180, - "name": "GNU Free Documentation License v1.2 only - no invariants", - "licenseId": "GFDL-1.2-no-invariants-only", - "seeAlso": [ - "https://www.gnu.org/licenses/old-licenses/fdl-1.2.txt" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Hippocratic-2.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Hippocratic-2.1.json", - "referenceNumber": 181, - "name": "Hippocratic License 2.1", - "licenseId": "Hippocratic-2.1", - "seeAlso": [ - "https://firstdonoharm.dev/version/2/1/license.html", - "https://github.com/EthicalSource/hippocratic-license/blob/58c0e646d64ff6fbee275bfe2b9492f914e3ab2a/LICENSE.txt" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Adobe-2006.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Adobe-2006.json", - "referenceNumber": 182, - "name": "Adobe Systems Incorporated Source Code License Agreement", - "licenseId": "Adobe-2006", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/AdobeLicense" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/OSL-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OSL-2.0.json", - "referenceNumber": 183, - "name": "Open Software License 2.0", - "licenseId": "OSL-2.0", - "seeAlso": [ - "http://web.archive.org/web/20041020171434/http://www.rosenlaw.com/osl2.0.html" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/CC-BY-NC-SA-4.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-SA-4.0.json", - "referenceNumber": 184, - "name": "Creative Commons Attribution Non Commercial Share Alike 4.0 International", - "licenseId": "CC-BY-NC-SA-4.0", - "seeAlso": [ - "https://creativecommons.org/licenses/by-nc-sa/4.0/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/LGPL-2.1-or-later.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/LGPL-2.1-or-later.json", - "referenceNumber": 185, - "name": "GNU Lesser General Public License v2.1 or later", - "licenseId": "LGPL-2.1-or-later", - "seeAlso": [ - "https://www.gnu.org/licenses/old-licenses/lgpl-2.1-standalone.html", - "https://opensource.org/licenses/LGPL-2.1" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/PolyForm-Noncommercial-1.0.0.html", - 
"isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/PolyForm-Noncommercial-1.0.0.json", - "referenceNumber": 186, - "name": "PolyForm Noncommercial License 1.0.0", - "licenseId": "PolyForm-Noncommercial-1.0.0", - "seeAlso": [ - "https://polyformproject.org/licenses/noncommercial/1.0.0" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/OpenSSL.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OpenSSL.json", - "referenceNumber": 187, - "name": "OpenSSL License", - "licenseId": "OpenSSL", - "seeAlso": [ - "http://www.openssl.org/source/license.html" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/GPL-3.0-with-GCC-exception.html", - "isDeprecatedLicenseId": true, - "detailsUrl": "https://spdx.org/licenses/GPL-3.0-with-GCC-exception.json", - "referenceNumber": 188, - "name": "GNU General Public License v3.0 w/GCC Runtime Library exception", - "licenseId": "GPL-3.0-with-GCC-exception", - "seeAlso": [ - "https://www.gnu.org/licenses/gcc-exception-3.1.html" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/OPL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OPL-1.0.json", - "referenceNumber": 189, - "name": "Open Public License v1.0", - "licenseId": "OPL-1.0", - "seeAlso": [ - "http://old.koalateam.com/jackaroo/OPL_1_0.TXT", - "https://fedoraproject.org/wiki/Licensing/Open_Public_License" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/BSD-3-Clause-Attribution.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/BSD-3-Clause-Attribution.json", - "referenceNumber": 190, - "name": "BSD with attribution", - "licenseId": "BSD-3-Clause-Attribution", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/BSD_with_Attribution" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Rdisc.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Rdisc.json", - "referenceNumber": 191, - "name": "Rdisc License", - "licenseId": "Rdisc", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Rdisc_License" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/MS-RL.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/MS-RL.json", - "referenceNumber": 192, - "name": "Microsoft Reciprocal License", - "licenseId": "MS-RL", - "seeAlso": [ - "http://www.microsoft.com/opensource/licenses.mspx", - "https://opensource.org/licenses/MS-RL" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/EUDatagrid.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/EUDatagrid.json", - "referenceNumber": 193, - "name": "EU DataGrid Software License", - "licenseId": "EUDatagrid", - "seeAlso": [ - "http://eu-datagrid.web.cern.ch/eu-datagrid/license.html", - "https://opensource.org/licenses/EUDatagrid" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/LGPLLR.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/LGPLLR.json", - "referenceNumber": 194, - "name": "Lesser General Public License For Linguistic Resources", - "licenseId": "LGPLLR", - "seeAlso": [ - "http://www-igm.univ-mlv.fr/~unitex/lgpllr.html" - ], - "isOsiApproved": false - }, - { - "reference": 
"https://spdx.org/licenses/AFL-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/AFL-2.0.json", - "referenceNumber": 195, - "name": "Academic Free License v2.0", - "licenseId": "AFL-2.0", - "seeAlso": [ - "http://wayback.archive.org/web/20060924134533/http://www.opensource.org/licenses/afl-2.0.txt" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/MIT-Modern-Variant.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/MIT-Modern-Variant.json", - "referenceNumber": 196, - "name": "MIT License Modern Variant", - "licenseId": "MIT-Modern-Variant", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing:MIT#Modern_Variants", - "https://ptolemy.berkeley.edu/copyright.htm", - "https://pirlwww.lpl.arizona.edu/resources/guide/software/PerlTk/Tixlic.html" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/GFDL-1.3-invariants-only.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/GFDL-1.3-invariants-only.json", - "referenceNumber": 197, - "name": "GNU Free Documentation License v1.3 only - invariants", - "licenseId": "GFDL-1.3-invariants-only", - "seeAlso": [ - "https://www.gnu.org/licenses/fdl-1.3.txt" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/LiLiQ-R-1.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/LiLiQ-R-1.1.json", - "referenceNumber": 198, - "name": "Licence Libre du Quรฉbec โ€“ Rรฉciprocitรฉ version 1.1", - "licenseId": "LiLiQ-R-1.1", - "seeAlso": [ - "https://www.forge.gouv.qc.ca/participez/licence-logicielle/licence-libre-du-quebec-liliq-en-francais/licence-libre-du-quebec-reciprocite-liliq-r-v1-1/", - "http://opensource.org/licenses/LiLiQ-R-1.1" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/CDLA-Permissive-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CDLA-Permissive-1.0.json", - "referenceNumber": 199, - "name": "Community Data License Agreement Permissive 1.0", - "licenseId": "CDLA-Permissive-1.0", - "seeAlso": [ - "https://cdla.io/permissive-1-0" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/DRL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/DRL-1.0.json", - "referenceNumber": 200, - "name": "Detection Rule License 1.0", - "licenseId": "DRL-1.0", - "seeAlso": [ - "https://github.com/Neo23x0/sigma/blob/master/LICENSE.Detection.Rules.md" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/BSD-Source-Code.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/BSD-Source-Code.json", - "referenceNumber": 201, - "name": "BSD Source Code Attribution", - "licenseId": "BSD-Source-Code", - "seeAlso": [ - "https://github.com/robbiehanson/CocoaHTTPServer/blob/master/LICENSE.txt" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CC-BY-NC-ND-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-ND-1.0.json", - "referenceNumber": 202, - "name": "Creative Commons Attribution Non Commercial No Derivatives 1.0 Generic", - "licenseId": "CC-BY-NC-ND-1.0", - "seeAlso": [ - "https://creativecommons.org/licenses/by-nd-nc/1.0/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/GLWTPL.html", - "isDeprecatedLicenseId": false, - 
"detailsUrl": "https://spdx.org/licenses/GLWTPL.json", - "referenceNumber": 203, - "name": "Good Luck With That Public License", - "licenseId": "GLWTPL", - "seeAlso": [ - "https://github.com/me-shaon/GLWTPL/commit/da5f6bc734095efbacb442c0b31e33a65b9d6e85" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/VSL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/VSL-1.0.json", - "referenceNumber": 204, - "name": "Vovida Software License v1.0", - "licenseId": "VSL-1.0", - "seeAlso": [ - "https://opensource.org/licenses/VSL-1.0" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/CPAL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CPAL-1.0.json", - "referenceNumber": 205, - "name": "Common Public Attribution License 1.0", - "licenseId": "CPAL-1.0", - "seeAlso": [ - "https://opensource.org/licenses/CPAL-1.0" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/HaskellReport.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/HaskellReport.json", - "referenceNumber": 206, - "name": "Haskell Language Report License", - "licenseId": "HaskellReport", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Haskell_Language_Report_License" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/APSL-1.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/APSL-1.1.json", - "referenceNumber": 207, - "name": "Apple Public Source License 1.1", - "licenseId": "APSL-1.1", - "seeAlso": [ - "http://www.opensource.apple.com/source/IOSerialFamily/IOSerialFamily-7/APPLE_LICENSE" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/GPL-2.0-or-later.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/GPL-2.0-or-later.json", - "referenceNumber": 208, - "name": "GNU General Public License v2.0 or later", - "licenseId": "GPL-2.0-or-later", - "seeAlso": [ - "https://www.gnu.org/licenses/old-licenses/gpl-2.0-standalone.html", - "https://opensource.org/licenses/GPL-2.0" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/BSD-3-Clause-Modification.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/BSD-3-Clause-Modification.json", - "referenceNumber": 209, - "name": "BSD 3-Clause Modification", - "licenseId": "BSD-3-Clause-Modification", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing:BSD#Modification_Variant" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/OLDAP-2.3.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OLDAP-2.3.json", - "referenceNumber": 210, - "name": "Open LDAP Public License v2.3", - "licenseId": "OLDAP-2.3", - "seeAlso": [ - "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003dd32cf54a32d581ab475d23c810b0a7fbaf8d63c3" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/OFL-1.1-no-RFN.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OFL-1.1-no-RFN.json", - "referenceNumber": 211, - "name": "SIL Open Font License 1.1 with no Reserved Font Name", - "licenseId": "OFL-1.1-no-RFN", - "seeAlso": [ - "http://scripts.sil.org/cms/scripts/page.php?item_id\u003dOFL_web", - 
"https://opensource.org/licenses/OFL-1.1" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/BitTorrent-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/BitTorrent-1.0.json", - "referenceNumber": 212, - "name": "BitTorrent Open Source License v1.0", - "licenseId": "BitTorrent-1.0", - "seeAlso": [ - "http://sources.gentoo.org/cgi-bin/viewvc.cgi/gentoo-x86/licenses/BitTorrent?r1\u003d1.1\u0026r2\u003d1.1.1.1\u0026diff_format\u003ds" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/NRL.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/NRL.json", - "referenceNumber": 213, - "name": "NRL License", - "licenseId": "NRL", - "seeAlso": [ - "http://web.mit.edu/network/isakmp/nrllicense.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/GFDL-1.2.html", - "isDeprecatedLicenseId": true, - "detailsUrl": "https://spdx.org/licenses/GFDL-1.2.json", - "referenceNumber": 214, - "name": "GNU Free Documentation License v1.2", - "licenseId": "GFDL-1.2", - "seeAlso": [ - "https://www.gnu.org/licenses/old-licenses/fdl-1.2.txt" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/MirOS.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/MirOS.json", - "referenceNumber": 215, - "name": "The MirOS Licence", - "licenseId": "MirOS", - "seeAlso": [ - "https://opensource.org/licenses/MirOS" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/Sleepycat.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Sleepycat.json", - "referenceNumber": 216, - "name": "Sleepycat License", - "licenseId": "Sleepycat", - "seeAlso": [ - "https://opensource.org/licenses/Sleepycat" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/LPPL-1.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/LPPL-1.1.json", - "referenceNumber": 217, - "name": "LaTeX Project Public License v1.1", - "licenseId": "LPPL-1.1", - "seeAlso": [ - "http://www.latex-project.org/lppl/lppl-1-1.txt" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/WTFPL.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/WTFPL.json", - "referenceNumber": 218, - "name": "Do What The F*ck You Want To Public License", - "licenseId": "WTFPL", - "seeAlso": [ - "http://www.wtfpl.net/about/", - "http://sam.zoy.org/wtfpl/COPYING" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/PolyForm-Small-Business-1.0.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/PolyForm-Small-Business-1.0.0.json", - "referenceNumber": 219, - "name": "PolyForm Small Business License 1.0.0", - "licenseId": "PolyForm-Small-Business-1.0.0", - "seeAlso": [ - "https://polyformproject.org/licenses/small-business/1.0.0" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Caldera.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Caldera.json", - "referenceNumber": 220, - "name": "Caldera License", - "licenseId": "Caldera", - "seeAlso": [ - "http://www.lemis.com/grog/UNIX/ancient-source-all.pdf" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/HTMLTIDY.html", - "isDeprecatedLicenseId": false, - 
"detailsUrl": "https://spdx.org/licenses/HTMLTIDY.json", - "referenceNumber": 221, - "name": "HTML Tidy License", - "licenseId": "HTMLTIDY", - "seeAlso": [ - "https://github.com/htacg/tidy-html5/blob/next/README/LICENSE.md" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/SISSL.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/SISSL.json", - "referenceNumber": 222, - "name": "Sun Industry Standards Source License v1.1", - "licenseId": "SISSL", - "seeAlso": [ - "http://www.openoffice.org/licenses/sissl_license.html", - "https://opensource.org/licenses/SISSL" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/MITNFA.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/MITNFA.json", - "referenceNumber": 223, - "name": "MIT +no-false-attribs license", - "licenseId": "MITNFA", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/MITNFA" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/0BSD.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/0BSD.json", - "referenceNumber": 224, - "name": "BSD Zero Clause License", - "licenseId": "0BSD", - "seeAlso": [ - "http://landley.net/toybox/license.html" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/CC0-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC0-1.0.json", - "referenceNumber": 225, - "name": "Creative Commons Zero v1.0 Universal", - "licenseId": "CC0-1.0", - "seeAlso": [ - "https://creativecommons.org/publicdomain/zero/1.0/legalcode" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/LGPL-3.0+.html", - "isDeprecatedLicenseId": true, - "detailsUrl": "https://spdx.org/licenses/LGPL-3.0+.json", - "referenceNumber": 226, - "name": "GNU Lesser General Public License v3.0 or later", - "licenseId": "LGPL-3.0+", - "seeAlso": [ - "https://www.gnu.org/licenses/lgpl-3.0-standalone.html", - "https://opensource.org/licenses/LGPL-3.0" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/CDLA-Sharing-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CDLA-Sharing-1.0.json", - "referenceNumber": 227, - "name": "Community Data License Agreement Sharing 1.0", - "licenseId": "CDLA-Sharing-1.0", - "seeAlso": [ - "https://cdla.io/sharing-1-0" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/GPL-2.0-with-bison-exception.html", - "isDeprecatedLicenseId": true, - "detailsUrl": "https://spdx.org/licenses/GPL-2.0-with-bison-exception.json", - "referenceNumber": 228, - "name": "GNU General Public License v2.0 w/Bison exception", - "licenseId": "GPL-2.0-with-bison-exception", - "seeAlso": [ - "http://git.savannah.gnu.org/cgit/bison.git/tree/data/yacc.c?id\u003d193d7c7054ba7197b0789e14965b739162319b5e#n141" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/EFL-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/EFL-2.0.json", - "referenceNumber": 229, - "name": "Eiffel Forum License v2.0", - "licenseId": "EFL-2.0", - "seeAlso": [ - "http://www.eiffel-nice.org/license/eiffel-forum-license-2.html", - "https://opensource.org/licenses/EFL-2.0" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/AFL-1.1.html", - 
"isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/AFL-1.1.json", - "referenceNumber": 230, - "name": "Academic Free License v1.1", - "licenseId": "AFL-1.1", - "seeAlso": [ - "http://opensource.linux-mirror.org/licenses/afl-1.1.txt", - "http://wayback.archive.org/web/20021004124254/http://www.opensource.org/licenses/academic.php" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/CC-BY-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-2.0.json", - "referenceNumber": 231, - "name": "Creative Commons Attribution 2.0 Generic", - "licenseId": "CC-BY-2.0", - "seeAlso": [ - "https://creativecommons.org/licenses/by/2.0/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/RPL-1.5.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/RPL-1.5.json", - "referenceNumber": 232, - "name": "Reciprocal Public License 1.5", - "licenseId": "RPL-1.5", - "seeAlso": [ - "https://opensource.org/licenses/RPL-1.5" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/MulanPSL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/MulanPSL-1.0.json", - "referenceNumber": 233, - "name": "Mulan Permissive Software License, Version 1", - "licenseId": "MulanPSL-1.0", - "seeAlso": [ - "https://license.coscl.org.cn/MulanPSL/", - "https://github.com/yuwenlong/longphp/blob/25dfb70cc2a466dc4bb55ba30901cbce08d164b5/LICENSE" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/GPL-3.0+.html", - "isDeprecatedLicenseId": true, - "detailsUrl": "https://spdx.org/licenses/GPL-3.0+.json", - "referenceNumber": 234, - "name": "GNU General Public License v3.0 or later", - "licenseId": "GPL-3.0+", - "seeAlso": [ - "https://www.gnu.org/licenses/gpl-3.0-standalone.html", - "https://opensource.org/licenses/GPL-3.0" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/HPND-sell-variant.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/HPND-sell-variant.json", - "referenceNumber": 235, - "name": "Historical Permission Notice and Disclaimer - sell variant", - "licenseId": "HPND-sell-variant", - "seeAlso": [ - "https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/tree/net/sunrpc/auth_gss/gss_generic_token.c?h\u003dv4.19" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/SSH-OpenSSH.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/SSH-OpenSSH.json", - "referenceNumber": 236, - "name": "SSH OpenSSH license", - "licenseId": "SSH-OpenSSH", - "seeAlso": [ - "https://github.com/openssh/openssh-portable/blob/1b11ea7c58cd5c59838b5fa574cd456d6047b2d4/LICENCE#L10" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/OLDAP-1.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OLDAP-1.1.json", - "referenceNumber": 237, - "name": "Open LDAP Public License v1.1", - "licenseId": "OLDAP-1.1", - "seeAlso": [ - "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003d806557a5ad59804ef3a44d5abfbe91d706b0791f" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/BitTorrent-1.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/BitTorrent-1.1.json", - "referenceNumber": 238, - "name": 
"BitTorrent Open Source License v1.1", - "licenseId": "BitTorrent-1.1", - "seeAlso": [ - "http://directory.fsf.org/wiki/License:BitTorrentOSL1.1" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/Artistic-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Artistic-1.0.json", - "referenceNumber": 239, - "name": "Artistic License 1.0", - "licenseId": "Artistic-1.0", - "seeAlso": [ - "https://opensource.org/licenses/Artistic-1.0" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/SSH-short.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/SSH-short.json", - "referenceNumber": 240, - "name": "SSH short notice", - "licenseId": "SSH-short", - "seeAlso": [ - "https://github.com/openssh/openssh-portable/blob/1b11ea7c58cd5c59838b5fa574cd456d6047b2d4/pathnames.h", - "http://web.mit.edu/kolya/.f/root/athena.mit.edu/sipb.mit.edu/project/openssh/OldFiles/src/openssh-2.9.9p2/ssh-add.1", - "https://joinup.ec.europa.eu/svn/lesoll/trunk/italc/lib/src/dsa_key.cpp" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CC-BY-3.0-AT.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-3.0-AT.json", - "referenceNumber": 241, - "name": "Creative Commons Attribution 3.0 Austria", - "licenseId": "CC-BY-3.0-AT", - "seeAlso": [ - "https://creativecommons.org/licenses/by/3.0/at/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/MIT-CMU.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/MIT-CMU.json", - "referenceNumber": 242, - "name": "CMU License", - "licenseId": "MIT-CMU", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing:MIT?rd\u003dLicensing/MIT#CMU_Style", - "https://github.com/python-pillow/Pillow/blob/fffb426092c8db24a5f4b6df243a8a3c01fb63cd/LICENSE" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/GFDL-1.3-no-invariants-or-later.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/GFDL-1.3-no-invariants-or-later.json", - "referenceNumber": 243, - "name": "GNU Free Documentation License v1.3 or later - no invariants", - "licenseId": "GFDL-1.3-no-invariants-or-later", - "seeAlso": [ - "https://www.gnu.org/licenses/fdl-1.3.txt" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/TOSL.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/TOSL.json", - "referenceNumber": 244, - "name": "Trusster Open Source License", - "licenseId": "TOSL", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/TOSL" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/MIT-open-group.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/MIT-open-group.json", - "referenceNumber": 245, - "name": "MIT Open Group variant", - "licenseId": "MIT-open-group", - "seeAlso": [ - "https://gitlab.freedesktop.org/xorg/app/iceauth/-/blob/master/COPYING", - "https://gitlab.freedesktop.org/xorg/app/xvinfo/-/blob/master/COPYING", - "https://gitlab.freedesktop.org/xorg/app/xsetroot/-/blob/master/COPYING", - "https://gitlab.freedesktop.org/xorg/app/xauth/-/blob/master/COPYING" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/OLDAP-2.6.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OLDAP-2.6.json", - 
"referenceNumber": 246, - "name": "Open LDAP Public License v2.6", - "licenseId": "OLDAP-2.6", - "seeAlso": [ - "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003d1cae062821881f41b73012ba816434897abf4205" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/GFDL-1.1-only.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/GFDL-1.1-only.json", - "referenceNumber": 247, - "name": "GNU Free Documentation License v1.1 only", - "licenseId": "GFDL-1.1-only", - "seeAlso": [ - "https://www.gnu.org/licenses/old-licenses/fdl-1.1.txt" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/FreeBSD-DOC.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/FreeBSD-DOC.json", - "referenceNumber": 248, - "name": "FreeBSD Documentation License", - "licenseId": "FreeBSD-DOC", - "seeAlso": [ - "https://www.freebsd.org/copyright/freebsd-doc-license/" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/GPL-2.0.html", - "isDeprecatedLicenseId": true, - "detailsUrl": "https://spdx.org/licenses/GPL-2.0.json", - "referenceNumber": 249, - "name": "GNU General Public License v2.0 only", - "licenseId": "GPL-2.0", - "seeAlso": [ - "https://www.gnu.org/licenses/old-licenses/gpl-2.0-standalone.html", - "https://opensource.org/licenses/GPL-2.0" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/Fair.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Fair.json", - "referenceNumber": 250, - "name": "Fair License", - "licenseId": "Fair", - "seeAlso": [ - "http://fairlicense.org/", - "https://opensource.org/licenses/Fair" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/CECILL-1.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CECILL-1.1.json", - "referenceNumber": 251, - "name": "CeCILL Free Software License Agreement v1.1", - "licenseId": "CECILL-1.1", - "seeAlso": [ - "http://www.cecill.info/licences/Licence_CeCILL_V1.1-US.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/QPL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/QPL-1.0.json", - "referenceNumber": 252, - "name": "Q Public License 1.0", - "licenseId": "QPL-1.0", - "seeAlso": [ - "http://doc.qt.nokia.com/3.3/license.html", - "https://opensource.org/licenses/QPL-1.0" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/DOC.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/DOC.json", - "referenceNumber": 253, - "name": "DOC License", - "licenseId": "DOC", - "seeAlso": [ - "http://www.cs.wustl.edu/~schmidt/ACE-copying.html", - "https://www.dre.vanderbilt.edu/~schmidt/ACE-copying.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/LAL-1.2.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/LAL-1.2.json", - "referenceNumber": 254, - "name": "Licence Art Libre 1.2", - "licenseId": "LAL-1.2", - "seeAlso": [ - "http://artlibre.org/licence/lal/licence-art-libre-12/" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/LPL-1.02.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/LPL-1.02.json", - "referenceNumber": 255, - "name": 
"Lucent Public License v1.02", - "licenseId": "LPL-1.02", - "seeAlso": [ - "http://plan9.bell-labs.com/plan9/license.html", - "https://opensource.org/licenses/LPL-1.02" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/CERN-OHL-P-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CERN-OHL-P-2.0.json", - "referenceNumber": 256, - "name": "CERN Open Hardware Licence Version 2 - Permissive", - "licenseId": "CERN-OHL-P-2.0", - "seeAlso": [ - "https://www.ohwr.org/project/cernohl/wikis/Documents/CERN-OHL-version-2" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/etalab-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/etalab-2.0.json", - "referenceNumber": 257, - "name": "Etalab Open License 2.0", - "licenseId": "etalab-2.0", - "seeAlso": [ - "https://github.com/DISIC/politique-de-contribution-open-source/blob/master/LICENSE.pdf", - "https://raw.githubusercontent.com/DISIC/politique-de-contribution-open-source/master/LICENSE" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/FTL.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/FTL.json", - "referenceNumber": 258, - "name": "Freetype Project License", - "licenseId": "FTL", - "seeAlso": [ - "http://freetype.fis.uniroma2.it/FTL.TXT", - "http://git.savannah.gnu.org/cgit/freetype/freetype2.git/tree/docs/FTL.TXT", - "http://gitlab.freedesktop.org/freetype/freetype/-/raw/master/docs/FTL.TXT" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/Qhull.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Qhull.json", - "referenceNumber": 259, - "name": "Qhull License", - "licenseId": "Qhull", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Qhull" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/BSD-3-Clause-Clear.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/BSD-3-Clause-Clear.json", - "referenceNumber": 260, - "name": "BSD 3-Clause Clear License", - "licenseId": "BSD-3-Clause-Clear", - "seeAlso": [ - "http://labs.metacarta.com/license-explanation.html#license" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/BSD-3-Clause-No-Military-License.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/BSD-3-Clause-No-Military-License.json", - "referenceNumber": 261, - "name": "BSD 3-Clause No Military License", - "licenseId": "BSD-3-Clause-No-Military-License", - "seeAlso": [ - "https://gitlab.syncad.com/hive/dhive/-/blob/master/LICENSE", - "https://github.com/greymass/swift-eosio/blob/master/LICENSE" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/FSFAP.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/FSFAP.json", - "referenceNumber": 262, - "name": "FSF All Permissive License", - "licenseId": "FSFAP", - "seeAlso": [ - "https://www.gnu.org/prep/maintain/html_node/License-Notices-for-Other-Files.html" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/APL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/APL-1.0.json", - "referenceNumber": 263, - "name": "Adaptive Public License 1.0", - "licenseId": "APL-1.0", - "seeAlso": [ - 
"https://opensource.org/licenses/APL-1.0" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/OLDAP-2.8.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OLDAP-2.8.json", - "referenceNumber": 264, - "name": "Open LDAP Public License v2.8", - "licenseId": "OLDAP-2.8", - "seeAlso": [ - "http://www.openldap.org/software/release/license.html" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/TORQUE-1.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/TORQUE-1.1.json", - "referenceNumber": 265, - "name": "TORQUE v2.5+ Software License v1.1", - "licenseId": "TORQUE-1.1", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/TORQUEv1.1" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Sendmail-8.23.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Sendmail-8.23.json", - "referenceNumber": 266, - "name": "Sendmail License 8.23", - "licenseId": "Sendmail-8.23", - "seeAlso": [ - "https://www.proofpoint.com/sites/default/files/sendmail-license.pdf", - "https://web.archive.org/web/20181003101040/https://www.proofpoint.com/sites/default/files/sendmail-license.pdf" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/diffmark.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/diffmark.json", - "referenceNumber": 267, - "name": "diffmark license", - "licenseId": "diffmark", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/diffmark" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Frameworx-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Frameworx-1.0.json", - "referenceNumber": 268, - "name": "Frameworx Open License 1.0", - "licenseId": "Frameworx-1.0", - "seeAlso": [ - "https://opensource.org/licenses/Frameworx-1.0" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/zlib-acknowledgement.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/zlib-acknowledgement.json", - "referenceNumber": 269, - "name": "zlib/libpng License with Acknowledgement", - "licenseId": "zlib-acknowledgement", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/ZlibWithAcknowledgement" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/EFL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/EFL-1.0.json", - "referenceNumber": 270, - "name": "Eiffel Forum License v1.0", - "licenseId": "EFL-1.0", - "seeAlso": [ - "http://www.eiffel-nice.org/license/forum.txt", - "https://opensource.org/licenses/EFL-1.0" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/IJG.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/IJG.json", - "referenceNumber": 271, - "name": "Independent JPEG Group License", - "licenseId": "IJG", - "seeAlso": [ - "http://dev.w3.org/cvsweb/Amaya/libjpeg/Attic/README?rev\u003d1.2" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/GFDL-1.3-no-invariants-only.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/GFDL-1.3-no-invariants-only.json", - "referenceNumber": 272, - "name": "GNU Free Documentation License v1.3 only - no invariants", - "licenseId": "GFDL-1.3-no-invariants-only", - "seeAlso": [ - 
"https://www.gnu.org/licenses/fdl-1.3.txt" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Noweb.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Noweb.json", - "referenceNumber": 273, - "name": "Noweb License", - "licenseId": "Noweb", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Noweb" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/GFDL-1.3.html", - "isDeprecatedLicenseId": true, - "detailsUrl": "https://spdx.org/licenses/GFDL-1.3.json", - "referenceNumber": 274, - "name": "GNU Free Documentation License v1.3", - "licenseId": "GFDL-1.3", - "seeAlso": [ - "https://www.gnu.org/licenses/fdl-1.3.txt" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/LGPL-2.1.html", - "isDeprecatedLicenseId": true, - "detailsUrl": "https://spdx.org/licenses/LGPL-2.1.json", - "referenceNumber": 275, - "name": "GNU Lesser General Public License v2.1 only", - "licenseId": "LGPL-2.1", - "seeAlso": [ - "https://www.gnu.org/licenses/old-licenses/lgpl-2.1-standalone.html", - "https://opensource.org/licenses/LGPL-2.1" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/gSOAP-1.3b.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/gSOAP-1.3b.json", - "referenceNumber": 276, - "name": "gSOAP Public License v1.3b", - "licenseId": "gSOAP-1.3b", - "seeAlso": [ - "http://www.cs.fsu.edu/~engelen/license.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/OFL-1.1-RFN.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OFL-1.1-RFN.json", - "referenceNumber": 277, - "name": "SIL Open Font License 1.1 with Reserved Font Name", - "licenseId": "OFL-1.1-RFN", - "seeAlso": [ - "http://scripts.sil.org/cms/scripts/page.php?item_id\u003dOFL_web", - "https://opensource.org/licenses/OFL-1.1" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/GPL-3.0-with-autoconf-exception.html", - "isDeprecatedLicenseId": true, - "detailsUrl": "https://spdx.org/licenses/GPL-3.0-with-autoconf-exception.json", - "referenceNumber": 278, - "name": "GNU General Public License v3.0 w/Autoconf exception", - "licenseId": "GPL-3.0-with-autoconf-exception", - "seeAlso": [ - "https://www.gnu.org/licenses/autoconf-exception-3.0.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CERN-OHL-1.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CERN-OHL-1.1.json", - "referenceNumber": 279, - "name": "CERN Open Hardware Licence v1.1", - "licenseId": "CERN-OHL-1.1", - "seeAlso": [ - "https://www.ohwr.org/project/licenses/wikis/cern-ohl-v1.1" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/AFL-2.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/AFL-2.1.json", - "referenceNumber": 280, - "name": "Academic Free License v2.1", - "licenseId": "AFL-2.1", - "seeAlso": [ - "http://opensource.linux-mirror.org/licenses/afl-2.1.txt" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/MIT-enna.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/MIT-enna.json", - "referenceNumber": 281, - "name": "enna License", - "licenseId": "MIT-enna", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/MIT#enna" - ], - 
"isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Adobe-Glyph.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Adobe-Glyph.json", - "referenceNumber": 282, - "name": "Adobe Glyph List License", - "licenseId": "Adobe-Glyph", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/MIT#AdobeGlyph" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/EPL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/EPL-1.0.json", - "referenceNumber": 283, - "name": "Eclipse Public License 1.0", - "licenseId": "EPL-1.0", - "seeAlso": [ - "http://www.eclipse.org/legal/epl-v10.html", - "https://opensource.org/licenses/EPL-1.0" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/Xerox.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Xerox.json", - "referenceNumber": 284, - "name": "Xerox License", - "licenseId": "Xerox", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Xerox" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/OLDAP-2.0.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OLDAP-2.0.1.json", - "referenceNumber": 285, - "name": "Open LDAP Public License v2.0.1", - "licenseId": "OLDAP-2.0.1", - "seeAlso": [ - "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003db6d68acd14e51ca3aab4428bf26522aa74873f0e" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/MTLL.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/MTLL.json", - "referenceNumber": 286, - "name": "Matrix Template Library License", - "licenseId": "MTLL", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Matrix_Template_Library_License" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/ImageMagick.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/ImageMagick.json", - "referenceNumber": 287, - "name": "ImageMagick License", - "licenseId": "ImageMagick", - "seeAlso": [ - "http://www.imagemagick.org/script/license.php" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/psutils.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/psutils.json", - "referenceNumber": 288, - "name": "psutils License", - "licenseId": "psutils", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/psutils" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/ClArtistic.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/ClArtistic.json", - "referenceNumber": 289, - "name": "Clarified Artistic License", - "licenseId": "ClArtistic", - "seeAlso": [ - "http://gianluca.dellavedova.org/2011/01/03/clarified-artistic-license/", - "http://www.ncftp.com/ncftp/doc/LICENSE.txt" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/GFDL-1.3-invariants-or-later.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/GFDL-1.3-invariants-or-later.json", - "referenceNumber": 290, - "name": "GNU Free Documentation License v1.3 or later - invariants", - "licenseId": "GFDL-1.3-invariants-or-later", - "seeAlso": [ - "https://www.gnu.org/licenses/fdl-1.3.txt" - ], - "isOsiApproved": false - }, - { - "reference": 
"https://spdx.org/licenses/APSL-1.2.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/APSL-1.2.json", - "referenceNumber": 291, - "name": "Apple Public Source License 1.2", - "licenseId": "APSL-1.2", - "seeAlso": [ - "http://www.samurajdata.se/opensource/mirror/licenses/apsl.php" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/Apache-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Apache-2.0.json", - "referenceNumber": 292, - "name": "Apache License 2.0", - "licenseId": "Apache-2.0", - "seeAlso": [ - "http://www.apache.org/licenses/LICENSE-2.0", - "https://opensource.org/licenses/Apache-2.0" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/NIST-PD.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/NIST-PD.json", - "referenceNumber": 293, - "name": "NIST Public Domain Notice", - "licenseId": "NIST-PD", - "seeAlso": [ - "https://github.com/tcheneau/simpleRPL/blob/e645e69e38dd4e3ccfeceb2db8cba05b7c2e0cd3/LICENSE.txt", - "https://github.com/tcheneau/Routing/blob/f09f46fcfe636107f22f2c98348188a65a135d98/README.md" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Libpng.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Libpng.json", - "referenceNumber": 294, - "name": "libpng License", - "licenseId": "Libpng", - "seeAlso": [ - "http://www.libpng.org/pub/png/src/libpng-LICENSE.txt" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/TAPR-OHL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/TAPR-OHL-1.0.json", - "referenceNumber": 295, - "name": "TAPR Open Hardware License v1.0", - "licenseId": "TAPR-OHL-1.0", - "seeAlso": [ - "https://www.tapr.org/OHL" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/ICU.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/ICU.json", - "referenceNumber": 296, - "name": "ICU License", - "licenseId": "ICU", - "seeAlso": [ - "http://source.icu-project.org/repos/icu/icu/trunk/license.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CC-BY-SA-2.5.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-SA-2.5.json", - "referenceNumber": 297, - "name": "Creative Commons Attribution Share Alike 2.5 Generic", - "licenseId": "CC-BY-SA-2.5", - "seeAlso": [ - "https://creativecommons.org/licenses/by-sa/2.5/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CC-PDDC.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-PDDC.json", - "referenceNumber": 298, - "name": "Creative Commons Public Domain Dedication and Certification", - "licenseId": "CC-PDDC", - "seeAlso": [ - "https://creativecommons.org/licenses/publicdomain/" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/AGPL-3.0-only.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/AGPL-3.0-only.json", - "referenceNumber": 299, - "name": "GNU Affero General Public License v3.0 only", - "licenseId": "AGPL-3.0-only", - "seeAlso": [ - "https://www.gnu.org/licenses/agpl.txt", - "https://opensource.org/licenses/AGPL-3.0" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/OSL-1.1.html", - 
"isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OSL-1.1.json", - "referenceNumber": 300, - "name": "Open Software License 1.1", - "licenseId": "OSL-1.1", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/OSL1.1" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/SugarCRM-1.1.3.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/SugarCRM-1.1.3.json", - "referenceNumber": 301, - "name": "SugarCRM Public License v1.1.3", - "licenseId": "SugarCRM-1.1.3", - "seeAlso": [ - "http://www.sugarcrm.com/crm/SPL" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/FreeImage.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/FreeImage.json", - "referenceNumber": 302, - "name": "FreeImage Public License v1.0", - "licenseId": "FreeImage", - "seeAlso": [ - "http://freeimage.sourceforge.net/freeimage-license.txt" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/W3C-20150513.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/W3C-20150513.json", - "referenceNumber": 303, - "name": "W3C Software Notice and Document License (2015-05-13)", - "licenseId": "W3C-20150513", - "seeAlso": [ - "https://www.w3.org/Consortium/Legal/2015/copyright-software-and-document" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/D-FSL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/D-FSL-1.0.json", - "referenceNumber": 304, - "name": "Deutsche Freie Software Lizenz", - "licenseId": "D-FSL-1.0", - "seeAlso": [ - "http://www.dipp.nrw.de/d-fsl/lizenzen/", - "http://www.dipp.nrw.de/d-fsl/index_html/lizenzen/de/D-FSL-1_0_de.txt", - "http://www.dipp.nrw.de/d-fsl/index_html/lizenzen/en/D-FSL-1_0_en.txt", - "https://www.hbz-nrw.de/produkte/open-access/lizenzen/dfsl", - "https://www.hbz-nrw.de/produkte/open-access/lizenzen/dfsl/deutsche-freie-software-lizenz", - "https://www.hbz-nrw.de/produkte/open-access/lizenzen/dfsl/german-free-software-license", - "https://www.hbz-nrw.de/produkte/open-access/lizenzen/dfsl/D-FSL-1_0_de.txt/at_download/file", - "https://www.hbz-nrw.de/produkte/open-access/lizenzen/dfsl/D-FSL-1_0_en.txt/at_download/file" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/RSA-MD.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/RSA-MD.json", - "referenceNumber": 305, - "name": "RSA Message-Digest License", - "licenseId": "RSA-MD", - "seeAlso": [ - "http://www.faqs.org/rfcs/rfc1321.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CC-BY-ND-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-ND-2.0.json", - "referenceNumber": 306, - "name": "Creative Commons Attribution No Derivatives 2.0 Generic", - "licenseId": "CC-BY-ND-2.0", - "seeAlso": [ - "https://creativecommons.org/licenses/by-nd/2.0/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/GPL-2.0-with-GCC-exception.html", - "isDeprecatedLicenseId": true, - "detailsUrl": "https://spdx.org/licenses/GPL-2.0-with-GCC-exception.json", - "referenceNumber": 307, - "name": "GNU General Public License v2.0 w/GCC Runtime Library exception", - "licenseId": "GPL-2.0-with-GCC-exception", - "seeAlso": [ - 
"https://gcc.gnu.org/git/?p\u003dgcc.git;a\u003dblob;f\u003dgcc/libgcc1.c;h\u003d762f5143fc6eed57b6797c82710f3538aa52b40b;hb\u003dcb143a3ce4fb417c68f5fa2691a1b1b1053dfba9#l10" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/AGPL-3.0-or-later.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/AGPL-3.0-or-later.json", - "referenceNumber": 308, - "name": "GNU Affero General Public License v3.0 or later", - "licenseId": "AGPL-3.0-or-later", - "seeAlso": [ - "https://www.gnu.org/licenses/agpl.txt", - "https://opensource.org/licenses/AGPL-3.0" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/AGPL-1.0-or-later.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/AGPL-1.0-or-later.json", - "referenceNumber": 309, - "name": "Affero General Public License v1.0 or later", - "licenseId": "AGPL-1.0-or-later", - "seeAlso": [ - "http://www.affero.org/oagpl.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/iMatix.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/iMatix.json", - "referenceNumber": 310, - "name": "iMatix Standard Function Library Agreement", - "licenseId": "iMatix", - "seeAlso": [ - "http://legacy.imatix.com/html/sfl/sfl4.htm#license" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/Plexus.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Plexus.json", - "referenceNumber": 311, - "name": "Plexus Classworlds License", - "licenseId": "Plexus", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Plexus_Classworlds_License" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/OFL-1.0-no-RFN.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OFL-1.0-no-RFN.json", - "referenceNumber": 312, - "name": "SIL Open Font License 1.0 with no Reserved Font Name", - "licenseId": "OFL-1.0-no-RFN", - "seeAlso": [ - "http://scripts.sil.org/cms/scripts/page.php?item_id\u003dOFL10_web" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/NAIST-2003.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/NAIST-2003.json", - "referenceNumber": 313, - "name": "Nara Institute of Science and Technology License (2003)", - "licenseId": "NAIST-2003", - "seeAlso": [ - "https://enterprise.dejacode.com/licenses/public/naist-2003/#license-text", - "https://github.com/nodejs/node/blob/4a19cc8947b1bba2b2d27816ec3d0edf9b28e503/LICENSE#L343" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/MIT-feh.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/MIT-feh.json", - "referenceNumber": 314, - "name": "feh License", - "licenseId": "MIT-feh", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/MIT#feh" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/ECL-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/ECL-2.0.json", - "referenceNumber": 315, - "name": "Educational Community License v2.0", - "licenseId": "ECL-2.0", - "seeAlso": [ - "https://opensource.org/licenses/ECL-2.0" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/CC-BY-2.5.html", - "isDeprecatedLicenseId": false, - "detailsUrl": 
"https://spdx.org/licenses/CC-BY-2.5.json", - "referenceNumber": 316, - "name": "Creative Commons Attribution 2.5 Generic", - "licenseId": "CC-BY-2.5", - "seeAlso": [ - "https://creativecommons.org/licenses/by/2.5/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/XSkat.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/XSkat.json", - "referenceNumber": 317, - "name": "XSkat License", - "licenseId": "XSkat", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/XSkat_License" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Linux-OpenIB.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Linux-OpenIB.json", - "referenceNumber": 318, - "name": "Linux Kernel Variant of OpenIB.org license", - "licenseId": "Linux-OpenIB", - "seeAlso": [ - "https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/tree/drivers/infiniband/core/sa.h" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Spencer-99.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Spencer-99.json", - "referenceNumber": 319, - "name": "Spencer License 99", - "licenseId": "Spencer-99", - "seeAlso": [ - "http://www.opensource.apple.com/source/tcl/tcl-5/tcl/generic/regfronts.c" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/BSD-3-Clause-No-Nuclear-License-2014.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/BSD-3-Clause-No-Nuclear-License-2014.json", - "referenceNumber": 320, - "name": "BSD 3-Clause No Nuclear License 2014", - "licenseId": "BSD-3-Clause-No-Nuclear-License-2014", - "seeAlso": [ - "https://java.net/projects/javaeetutorial/pages/BerkeleyLicense" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CC-BY-NC-ND-3.0-IGO.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-ND-3.0-IGO.json", - "referenceNumber": 321, - "name": "Creative Commons Attribution Non Commercial No Derivatives 3.0 IGO", - "licenseId": "CC-BY-NC-ND-3.0-IGO", - "seeAlso": [ - "https://creativecommons.org/licenses/by-nc-nd/3.0/igo/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CC-BY-NC-SA-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-SA-1.0.json", - "referenceNumber": 322, - "name": "Creative Commons Attribution Non Commercial Share Alike 1.0 Generic", - "licenseId": "CC-BY-NC-SA-1.0", - "seeAlso": [ - "https://creativecommons.org/licenses/by-nc-sa/1.0/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/GPL-2.0-with-font-exception.html", - "isDeprecatedLicenseId": true, - "detailsUrl": "https://spdx.org/licenses/GPL-2.0-with-font-exception.json", - "referenceNumber": 323, - "name": "GNU General Public License v2.0 w/Font exception", - "licenseId": "GPL-2.0-with-font-exception", - "seeAlso": [ - "https://www.gnu.org/licenses/gpl-faq.html#FontException" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Crossword.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Crossword.json", - "referenceNumber": 324, - "name": "Crossword License", - "licenseId": "Crossword", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Crossword" - ], - "isOsiApproved": false - }, - { - "reference": 
"https://spdx.org/licenses/OLDAP-2.2.2.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OLDAP-2.2.2.json", - "referenceNumber": 325, - "name": "Open LDAP Public License 2.2.2", - "licenseId": "OLDAP-2.2.2", - "seeAlso": [ - "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003ddf2cc1e21eb7c160695f5b7cffd6296c151ba188" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/BSD-2-Clause-NetBSD.html", - "isDeprecatedLicenseId": true, - "detailsUrl": "https://spdx.org/licenses/BSD-2-Clause-NetBSD.json", - "referenceNumber": 326, - "name": "BSD 2-Clause NetBSD License", - "licenseId": "BSD-2-Clause-NetBSD", - "seeAlso": [ - "http://www.netbsd.org/about/redistribution.html#default" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/GPL-2.0+.html", - "isDeprecatedLicenseId": true, - "detailsUrl": "https://spdx.org/licenses/GPL-2.0+.json", - "referenceNumber": 327, - "name": "GNU General Public License v2.0 or later", - "licenseId": "GPL-2.0+", - "seeAlso": [ - "https://www.gnu.org/licenses/old-licenses/gpl-2.0-standalone.html", - "https://opensource.org/licenses/GPL-2.0" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/CC-BY-4.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-4.0.json", - "referenceNumber": 328, - "name": "Creative Commons Attribution 4.0 International", - "licenseId": "CC-BY-4.0", - "seeAlso": [ - "https://creativecommons.org/licenses/by/4.0/legalcode" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/OLDAP-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OLDAP-2.0.json", - "referenceNumber": 329, - "name": "Open LDAP Public License v2.0 (or possibly 2.0A and 2.0B)", - "licenseId": "OLDAP-2.0", - "seeAlso": [ - "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003dcbf50f4e1185a21abd4c0a54d3f4341fe28f36ea" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/NOSL.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/NOSL.json", - "referenceNumber": 330, - "name": "Netizen Open Source License", - "licenseId": "NOSL", - "seeAlso": [ - "http://bits.netizen.com.au/licenses/NOSL/nosl.txt" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/CDDL-1.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CDDL-1.1.json", - "referenceNumber": 331, - "name": "Common Development and Distribution License 1.1", - "licenseId": "CDDL-1.1", - "seeAlso": [ - "http://glassfish.java.net/public/CDDL+GPL_1_1.html", - "https://javaee.github.io/glassfish/LICENSE" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/APSL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/APSL-1.0.json", - "referenceNumber": 332, - "name": "Apple Public Source License 1.0", - "licenseId": "APSL-1.0", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Apple_Public_Source_License_1.0" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/EUPL-1.2.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/EUPL-1.2.json", - "referenceNumber": 333, - "name": "European Union Public License 1.2", - "licenseId": "EUPL-1.2", - "seeAlso": 
[ - "https://joinup.ec.europa.eu/page/eupl-text-11-12", - "https://joinup.ec.europa.eu/sites/default/files/custom-page/attachment/eupl_v1.2_en.pdf", - "https://joinup.ec.europa.eu/sites/default/files/custom-page/attachment/2020-03/EUPL-1.2%20EN.txt", - "https://joinup.ec.europa.eu/sites/default/files/inline-files/EUPL%20v1_2%20EN(1).txt", - "http://eur-lex.europa.eu/legal-content/EN/TXT/HTML/?uri\u003dCELEX:32017D0863", - "https://opensource.org/licenses/EUPL-1.2" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/Nokia.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Nokia.json", - "referenceNumber": 334, - "name": "Nokia Open Source License", - "licenseId": "Nokia", - "seeAlso": [ - "https://opensource.org/licenses/nokia" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/RHeCos-1.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/RHeCos-1.1.json", - "referenceNumber": 335, - "name": "Red Hat eCos Public License v1.1", - "licenseId": "RHeCos-1.1", - "seeAlso": [ - "http://ecos.sourceware.org/old-license.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/GPL-2.0-only.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/GPL-2.0-only.json", - "referenceNumber": 336, - "name": "GNU General Public License v2.0 only", - "licenseId": "GPL-2.0-only", - "seeAlso": [ - "https://www.gnu.org/licenses/old-licenses/gpl-2.0-standalone.html", - "https://opensource.org/licenses/GPL-2.0" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/OLDAP-2.7.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OLDAP-2.7.json", - "referenceNumber": 337, - "name": "Open LDAP Public License v2.7", - "licenseId": "OLDAP-2.7", - "seeAlso": [ - "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003d47c2415c1df81556eeb39be6cad458ef87c534a2" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/Vim.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Vim.json", - "referenceNumber": 338, - "name": "Vim License", - "licenseId": "Vim", - "seeAlso": [ - "http://vimdoc.sourceforge.net/htmldoc/uganda.html" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/SAX-PD.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/SAX-PD.json", - "referenceNumber": 339, - "name": "Sax Public Domain Notice", - "licenseId": "SAX-PD", - "seeAlso": [ - "http://www.saxproject.org/copying.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/BSD-3-Clause-No-Nuclear-Warranty.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/BSD-3-Clause-No-Nuclear-Warranty.json", - "referenceNumber": 340, - "name": "BSD 3-Clause No Nuclear Warranty", - "licenseId": "BSD-3-Clause-No-Nuclear-Warranty", - "seeAlso": [ - "https://jogamp.org/git/?p\u003dgluegen.git;a\u003dblob_plain;f\u003dLICENSE.txt" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/NetCDF.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/NetCDF.json", - "referenceNumber": 341, - "name": "NetCDF license", - "licenseId": "NetCDF", - "seeAlso": [ - 
"http://www.unidata.ucar.edu/software/netcdf/copyright.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/dvipdfm.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/dvipdfm.json", - "referenceNumber": 342, - "name": "dvipdfm License", - "licenseId": "dvipdfm", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/dvipdfm" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/SHL-0.5.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/SHL-0.5.json", - "referenceNumber": 343, - "name": "Solderpad Hardware License v0.5", - "licenseId": "SHL-0.5", - "seeAlso": [ - "https://solderpad.org/licenses/SHL-0.5/" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/LGPL-2.0-only.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/LGPL-2.0-only.json", - "referenceNumber": 344, - "name": "GNU Library General Public License v2 only", - "licenseId": "LGPL-2.0-only", - "seeAlso": [ - "https://www.gnu.org/licenses/old-licenses/lgpl-2.0-standalone.html" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/AAL.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/AAL.json", - "referenceNumber": 345, - "name": "Attribution Assurance License", - "licenseId": "AAL", - "seeAlso": [ - "https://opensource.org/licenses/attribution" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/Unicode-TOU.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Unicode-TOU.json", - "referenceNumber": 346, - "name": "Unicode Terms of Use", - "licenseId": "Unicode-TOU", - "seeAlso": [ - "http://www.unicode.org/copyright.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/LPPL-1.2.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/LPPL-1.2.json", - "referenceNumber": 347, - "name": "LaTeX Project Public License v1.2", - "licenseId": "LPPL-1.2", - "seeAlso": [ - "http://www.latex-project.org/lppl/lppl-1-2.txt" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/xpp.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/xpp.json", - "referenceNumber": 348, - "name": "XPP License", - "licenseId": "xpp", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/xpp" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/SHL-0.51.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/SHL-0.51.json", - "referenceNumber": 349, - "name": "Solderpad Hardware License, Version 0.51", - "licenseId": "SHL-0.51", - "seeAlso": [ - "https://solderpad.org/licenses/SHL-0.51/" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/NCSA.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/NCSA.json", - "referenceNumber": 350, - "name": "University of Illinois/NCSA Open Source License", - "licenseId": "NCSA", - "seeAlso": [ - "http://otm.illinois.edu/uiuc_openSource", - "https://opensource.org/licenses/NCSA" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/LGPL-2.0-or-later.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/LGPL-2.0-or-later.json", - "referenceNumber": 351, - "name": "GNU Library General Public 
License v2 or later", - "licenseId": "LGPL-2.0-or-later", - "seeAlso": [ - "https://www.gnu.org/licenses/old-licenses/lgpl-2.0-standalone.html" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/CC-BY-3.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-3.0.json", - "referenceNumber": 352, - "name": "Creative Commons Attribution 3.0 Unported", - "licenseId": "CC-BY-3.0", - "seeAlso": [ - "https://creativecommons.org/licenses/by/3.0/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/GPL-1.0.html", - "isDeprecatedLicenseId": true, - "detailsUrl": "https://spdx.org/licenses/GPL-1.0.json", - "referenceNumber": 353, - "name": "GNU General Public License v1.0 only", - "licenseId": "GPL-1.0", - "seeAlso": [ - "https://www.gnu.org/licenses/old-licenses/gpl-1.0-standalone.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/W3C.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/W3C.json", - "referenceNumber": 354, - "name": "W3C Software Notice and License (2002-12-31)", - "licenseId": "W3C", - "seeAlso": [ - "http://www.w3.org/Consortium/Legal/2002/copyright-software-20021231.html", - "https://opensource.org/licenses/W3C" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/Aladdin.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Aladdin.json", - "referenceNumber": 355, - "name": "Aladdin Free Public License", - "licenseId": "Aladdin", - "seeAlso": [ - "http://pages.cs.wisc.edu/~ghost/doc/AFPL/6.01/Public.htm" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/BSD-3-Clause-No-Nuclear-License.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/BSD-3-Clause-No-Nuclear-License.json", - "referenceNumber": 356, - "name": "BSD 3-Clause No Nuclear License", - "licenseId": "BSD-3-Clause-No-Nuclear-License", - "seeAlso": [ - "http://download.oracle.com/otn-pub/java/licenses/bsd.txt?AuthParam\u003d1467140197_43d516ce1776bd08a58235a7785be1cc" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/GFDL-1.1-or-later.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/GFDL-1.1-or-later.json", - "referenceNumber": 357, - "name": "GNU Free Documentation License v1.1 or later", - "licenseId": "GFDL-1.1-or-later", - "seeAlso": [ - "https://www.gnu.org/licenses/old-licenses/fdl-1.1.txt" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/SMPPL.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/SMPPL.json", - "referenceNumber": 358, - "name": "Secure Messaging Protocol Public License", - "licenseId": "SMPPL", - "seeAlso": [ - "https://github.com/dcblake/SMP/blob/master/Documentation/License.txt" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/GFDL-1.1.html", - "isDeprecatedLicenseId": true, - "detailsUrl": "https://spdx.org/licenses/GFDL-1.1.json", - "referenceNumber": 359, - "name": "GNU Free Documentation License v1.1", - "licenseId": "GFDL-1.1", - "seeAlso": [ - "https://www.gnu.org/licenses/old-licenses/fdl-1.1.txt" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/OLDAP-1.4.html", - "isDeprecatedLicenseId": false, - "detailsUrl": 
"https://spdx.org/licenses/OLDAP-1.4.json", - "referenceNumber": 360, - "name": "Open LDAP Public License v1.4", - "licenseId": "OLDAP-1.4", - "seeAlso": [ - "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003dc9f95c2f3f2ffb5e0ae55fe7388af75547660941" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Condor-1.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Condor-1.1.json", - "referenceNumber": 361, - "name": "Condor Public License v1.1", - "licenseId": "Condor-1.1", - "seeAlso": [ - "http://research.cs.wisc.edu/condor/license.html#condor", - "http://web.archive.org/web/20111123062036/http://research.cs.wisc.edu/condor/license.html#condor" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/GPL-1.0-only.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/GPL-1.0-only.json", - "referenceNumber": 362, - "name": "GNU General Public License v1.0 only", - "licenseId": "GPL-1.0-only", - "seeAlso": [ - "https://www.gnu.org/licenses/old-licenses/gpl-1.0-standalone.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/GPL-3.0.html", - "isDeprecatedLicenseId": true, - "detailsUrl": "https://spdx.org/licenses/GPL-3.0.json", - "referenceNumber": 363, - "name": "GNU General Public License v3.0 only", - "licenseId": "GPL-3.0", - "seeAlso": [ - "https://www.gnu.org/licenses/gpl-3.0-standalone.html", - "https://opensource.org/licenses/GPL-3.0" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/PSF-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/PSF-2.0.json", - "referenceNumber": 364, - "name": "Python Software Foundation License 2.0", - "licenseId": "PSF-2.0", - "seeAlso": [ - "https://opensource.org/licenses/Python-2.0" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Apache-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Apache-1.0.json", - "referenceNumber": 365, - "name": "Apache License 1.0", - "licenseId": "Apache-1.0", - "seeAlso": [ - "http://www.apache.org/licenses/LICENSE-1.0" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/EPL-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/EPL-2.0.json", - "referenceNumber": 366, - "name": "Eclipse Public License 2.0", - "licenseId": "EPL-2.0", - "seeAlso": [ - "https://www.eclipse.org/legal/epl-2.0", - "https://www.opensource.org/licenses/EPL-2.0" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/Python-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Python-2.0.json", - "referenceNumber": 367, - "name": "Python License 2.0", - "licenseId": "Python-2.0", - "seeAlso": [ - "https://opensource.org/licenses/Python-2.0" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/OLDAP-2.4.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OLDAP-2.4.json", - "referenceNumber": 368, - "name": "Open LDAP Public License v2.4", - "licenseId": "OLDAP-2.4", - "seeAlso": [ - "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003dcd1284c4a91a8a380d904eee68d1583f989ed386" - ], - 
"isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/PostgreSQL.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/PostgreSQL.json", - "referenceNumber": 369, - "name": "PostgreSQL License", - "licenseId": "PostgreSQL", - "seeAlso": [ - "http://www.postgresql.org/about/licence", - "https://opensource.org/licenses/PostgreSQL" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/Net-SNMP.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Net-SNMP.json", - "referenceNumber": 370, - "name": "Net-SNMP License", - "licenseId": "Net-SNMP", - "seeAlso": [ - "http://net-snmp.sourceforge.net/about/license.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Ruby.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Ruby.json", - "referenceNumber": 371, - "name": "Ruby License", - "licenseId": "Ruby", - "seeAlso": [ - "http://www.ruby-lang.org/en/LICENSE.txt" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/OSET-PL-2.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OSET-PL-2.1.json", - "referenceNumber": 372, - "name": "OSET Public License version 2.1", - "licenseId": "OSET-PL-2.1", - "seeAlso": [ - "http://www.osetfoundation.org/public-license", - "https://opensource.org/licenses/OPL-2.1" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/Dotseqn.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Dotseqn.json", - "referenceNumber": 373, - "name": "Dotseqn License", - "licenseId": "Dotseqn", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Dotseqn" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CUA-OPL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CUA-OPL-1.0.json", - "referenceNumber": 374, - "name": "CUA Office Public License v1.0", - "licenseId": "CUA-OPL-1.0", - "seeAlso": [ - "https://opensource.org/licenses/CUA-OPL-1.0" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/Bahyph.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Bahyph.json", - "referenceNumber": 375, - "name": "Bahyph License", - "licenseId": "Bahyph", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Bahyph" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/LiLiQ-Rplus-1.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/LiLiQ-Rplus-1.1.json", - "referenceNumber": 376, - "name": "Licence Libre du Quรฉbec โ€“ Rรฉciprocitรฉ forte version 1.1", - "licenseId": "LiLiQ-Rplus-1.1", - "seeAlso": [ - "https://www.forge.gouv.qc.ca/participez/licence-logicielle/licence-libre-du-quebec-liliq-en-francais/licence-libre-du-quebec-reciprocite-forte-liliq-r-v1-1/", - "http://opensource.org/licenses/LiLiQ-Rplus-1.1" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/LGPL-2.0+.html", - "isDeprecatedLicenseId": true, - "detailsUrl": "https://spdx.org/licenses/LGPL-2.0+.json", - "referenceNumber": 377, - "name": "GNU Library General Public License v2 or later", - "licenseId": "LGPL-2.0+", - "seeAlso": [ - "https://www.gnu.org/licenses/old-licenses/lgpl-2.0-standalone.html" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/wxWindows.html", - 
"isDeprecatedLicenseId": true, - "detailsUrl": "https://spdx.org/licenses/wxWindows.json", - "referenceNumber": 378, - "name": "wxWindows Library License", - "licenseId": "wxWindows", - "seeAlso": [ - "https://opensource.org/licenses/WXwindows" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/AGPL-3.0.html", - "isDeprecatedLicenseId": true, - "detailsUrl": "https://spdx.org/licenses/AGPL-3.0.json", - "referenceNumber": 379, - "name": "GNU Affero General Public License v3.0", - "licenseId": "AGPL-3.0", - "seeAlso": [ - "https://www.gnu.org/licenses/agpl.txt", - "https://opensource.org/licenses/AGPL-3.0" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/Abstyles.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Abstyles.json", - "referenceNumber": 380, - "name": "Abstyles License", - "licenseId": "Abstyles", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Abstyles" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/OLDAP-1.3.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OLDAP-1.3.json", - "referenceNumber": 381, - "name": "Open LDAP Public License v1.3", - "licenseId": "OLDAP-1.3", - "seeAlso": [ - "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003de5f8117f0ce088d0bd7a8e18ddf37eaa40eb09b1" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/NTP-0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/NTP-0.json", - "referenceNumber": 382, - "name": "NTP No Attribution", - "licenseId": "NTP-0", - "seeAlso": [ - "https://github.com/tytso/e2fsprogs/blob/master/lib/et/et_name.c" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/OLDAP-2.2.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OLDAP-2.2.json", - "referenceNumber": 383, - "name": "Open LDAP Public License v2.2", - "licenseId": "OLDAP-2.2", - "seeAlso": [ - "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003d470b0c18ec67621c85881b2733057fecf4a1acc3" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CC-BY-SA-3.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-SA-3.0.json", - "referenceNumber": 384, - "name": "Creative Commons Attribution Share Alike 3.0 Unported", - "licenseId": "CC-BY-SA-3.0", - "seeAlso": [ - "https://creativecommons.org/licenses/by-sa/3.0/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/SWL.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/SWL.json", - "referenceNumber": 385, - "name": "Scheme Widget Library (SWL) Software License Agreement", - "licenseId": "SWL", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/SWL" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/BSD-3-Clause-Open-MPI.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/BSD-3-Clause-Open-MPI.json", - "referenceNumber": 386, - "name": "BSD 3-Clause Open MPI variant", - "licenseId": "BSD-3-Clause-Open-MPI", - "seeAlso": [ - "https://www.open-mpi.org/community/license.php", - "http://www.netlib.org/lapack/LICENSE.txt" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/LGPL-2.1+.html", - 
"isDeprecatedLicenseId": true, - "detailsUrl": "https://spdx.org/licenses/LGPL-2.1+.json", - "referenceNumber": 387, - "name": "GNU Library General Public License v2.1 or later", - "licenseId": "LGPL-2.1+", - "seeAlso": [ - "https://www.gnu.org/licenses/old-licenses/lgpl-2.1-standalone.html", - "https://opensource.org/licenses/LGPL-2.1" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/GFDL-1.2-invariants-only.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/GFDL-1.2-invariants-only.json", - "referenceNumber": 388, - "name": "GNU Free Documentation License v1.2 only - invariants", - "licenseId": "GFDL-1.2-invariants-only", - "seeAlso": [ - "https://www.gnu.org/licenses/old-licenses/fdl-1.2.txt" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Zend-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Zend-2.0.json", - "referenceNumber": 389, - "name": "Zend License v2.0", - "licenseId": "Zend-2.0", - "seeAlso": [ - "https://web.archive.org/web/20130517195954/http://www.zend.com/license/2_00.txt" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/GFDL-1.1-no-invariants-or-later.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/GFDL-1.1-no-invariants-or-later.json", - "referenceNumber": 390, - "name": "GNU Free Documentation License v1.1 or later - no invariants", - "licenseId": "GFDL-1.1-no-invariants-or-later", - "seeAlso": [ - "https://www.gnu.org/licenses/old-licenses/fdl-1.1.txt" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/mpich2.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/mpich2.json", - "referenceNumber": 391, - "name": "mpich2 License", - "licenseId": "mpich2", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/MIT" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/NLOD-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/NLOD-1.0.json", - "referenceNumber": 392, - "name": "Norwegian Licence for Open Government Data", - "licenseId": "NLOD-1.0", - "seeAlso": [ - "http://data.norge.no/nlod/en/1.0" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/gnuplot.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/gnuplot.json", - "referenceNumber": 393, - "name": "gnuplot License", - "licenseId": "gnuplot", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Gnuplot" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/CERN-OHL-S-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CERN-OHL-S-2.0.json", - "referenceNumber": 394, - "name": "CERN Open Hardware Licence Version 2 - Strongly Reciprocal", - "licenseId": "CERN-OHL-S-2.0", - "seeAlso": [ - "https://www.ohwr.org/project/cernohl/wikis/Documents/CERN-OHL-version-2" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/OGL-UK-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OGL-UK-2.0.json", - "referenceNumber": 395, - "name": "Open Government Licence v2.0", - "licenseId": "OGL-UK-2.0", - "seeAlso": [ - "http://www.nationalarchives.gov.uk/doc/open-government-licence/version/2/" - ], - "isOsiApproved": false - }, - { - "reference": 
"https://spdx.org/licenses/NPL-1.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/NPL-1.1.json", - "referenceNumber": 396, - "name": "Netscape Public License v1.1", - "licenseId": "NPL-1.1", - "seeAlso": [ - "http://www.mozilla.org/MPL/NPL/1.1/" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/Zed.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Zed.json", - "referenceNumber": 397, - "name": "Zed License", - "licenseId": "Zed", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Zed" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/VOSTROM.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/VOSTROM.json", - "referenceNumber": 398, - "name": "VOSTROM Public License for Open Source", - "licenseId": "VOSTROM", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/VOSTROM" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/ZPL-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/ZPL-2.0.json", - "referenceNumber": 399, - "name": "Zope Public License 2.0", - "licenseId": "ZPL-2.0", - "seeAlso": [ - "http://old.zope.org/Resources/License/ZPL-2.0", - "https://opensource.org/licenses/ZPL-2.0" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/CERN-OHL-W-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CERN-OHL-W-2.0.json", - "referenceNumber": 400, - "name": "CERN Open Hardware Licence Version 2 - Weakly Reciprocal", - "licenseId": "CERN-OHL-W-2.0", - "seeAlso": [ - "https://www.ohwr.org/project/cernohl/wikis/Documents/CERN-OHL-version-2" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/CC-BY-NC-SA-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-SA-2.0.json", - "referenceNumber": 401, - "name": "Creative Commons Attribution Non Commercial Share Alike 2.0 Generic", - "licenseId": "CC-BY-NC-SA-2.0", - "seeAlso": [ - "https://creativecommons.org/licenses/by-nc-sa/2.0/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/APSL-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/APSL-2.0.json", - "referenceNumber": 402, - "name": "Apple Public Source License 2.0", - "licenseId": "APSL-2.0", - "seeAlso": [ - "http://www.opensource.apple.com/license/apsl/" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/LPL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/LPL-1.0.json", - "referenceNumber": 403, - "name": "Lucent Public License Version 1.0", - "licenseId": "LPL-1.0", - "seeAlso": [ - "https://opensource.org/licenses/LPL-1.0" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/ANTLR-PD-fallback.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/ANTLR-PD-fallback.json", - "referenceNumber": 404, - "name": "ANTLR Software Rights Notice with license fallback", - "licenseId": "ANTLR-PD-fallback", - "seeAlso": [ - "http://www.antlr2.org/license.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/libtiff.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/libtiff.json", - "referenceNumber": 
405, - "name": "libtiff License", - "licenseId": "libtiff", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/libtiff" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/HPND.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/HPND.json", - "referenceNumber": 406, - "name": "Historical Permission Notice and Disclaimer", - "licenseId": "HPND", - "seeAlso": [ - "https://opensource.org/licenses/HPND" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/GPL-3.0-or-later.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/GPL-3.0-or-later.json", - "referenceNumber": 407, - "name": "GNU General Public License v3.0 or later", - "licenseId": "GPL-3.0-or-later", - "seeAlso": [ - "https://www.gnu.org/licenses/gpl-3.0-standalone.html", - "https://opensource.org/licenses/GPL-3.0" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/Artistic-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Artistic-2.0.json", - "referenceNumber": 408, - "name": "Artistic License 2.0", - "licenseId": "Artistic-2.0", - "seeAlso": [ - "http://www.perlfoundation.org/artistic_license_2_0", - "https://www.perlfoundation.org/artistic-license-20.html", - "https://opensource.org/licenses/artistic-license-2.0" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/Unicode-DFS-2015.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Unicode-DFS-2015.json", - "referenceNumber": 409, - "name": "Unicode License Agreement - Data Files and Software (2015)", - "licenseId": "Unicode-DFS-2015", - "seeAlso": [ - "https://web.archive.org/web/20151224134844/http://unicode.org/copyright.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CC-BY-NC-4.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-4.0.json", - "referenceNumber": 410, - "name": "Creative Commons Attribution Non Commercial 4.0 International", - "licenseId": "CC-BY-NC-4.0", - "seeAlso": [ - "https://creativecommons.org/licenses/by-nc/4.0/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/RPL-1.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/RPL-1.1.json", - "referenceNumber": 411, - "name": "Reciprocal Public License 1.1", - "licenseId": "RPL-1.1", - "seeAlso": [ - "https://opensource.org/licenses/RPL-1.1" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/CC-BY-SA-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-SA-1.0.json", - "referenceNumber": 412, - "name": "Creative Commons Attribution Share Alike 1.0 Generic", - "licenseId": "CC-BY-SA-1.0", - "seeAlso": [ - "https://creativecommons.org/licenses/by-sa/1.0/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Cube.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Cube.json", - "referenceNumber": 413, - "name": "Cube License", - "licenseId": "Cube", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Cube" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/ODC-By-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/ODC-By-1.0.json", - 
"referenceNumber": 414, - "name": "Open Data Commons Attribution License v1.0", - "licenseId": "ODC-By-1.0", - "seeAlso": [ - "https://opendatacommons.org/licenses/by/1.0/" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/copyleft-next-0.3.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/copyleft-next-0.3.0.json", - "referenceNumber": 415, - "name": "copyleft-next 0.3.0", - "licenseId": "copyleft-next-0.3.0", - "seeAlso": [ - "https://github.com/copyleft-next/copyleft-next/blob/master/Releases/copyleft-next-0.3.0" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CC-BY-ND-4.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-ND-4.0.json", - "referenceNumber": 416, - "name": "Creative Commons Attribution No Derivatives 4.0 International", - "licenseId": "CC-BY-ND-4.0", - "seeAlso": [ - "https://creativecommons.org/licenses/by-nd/4.0/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/ZPL-1.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/ZPL-1.1.json", - "referenceNumber": 417, - "name": "Zope Public License 1.1", - "licenseId": "ZPL-1.1", - "seeAlso": [ - "http://old.zope.org/Resources/License/ZPL-1.1" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/GFDL-1.3-or-later.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/GFDL-1.3-or-later.json", - "referenceNumber": 418, - "name": "GNU Free Documentation License v1.3 or later", - "licenseId": "GFDL-1.3-or-later", - "seeAlso": [ - "https://www.gnu.org/licenses/fdl-1.3.txt" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/CATOSL-1.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CATOSL-1.1.json", - "referenceNumber": 419, - "name": "Computer Associates Trusted Open Source License 1.1", - "licenseId": "CATOSL-1.1", - "seeAlso": [ - "https://opensource.org/licenses/CATOSL-1.1" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/GPL-2.0-with-classpath-exception.html", - "isDeprecatedLicenseId": true, - "detailsUrl": "https://spdx.org/licenses/GPL-2.0-with-classpath-exception.json", - "referenceNumber": 420, - "name": "GNU General Public License v2.0 w/Classpath exception", - "licenseId": "GPL-2.0-with-classpath-exception", - "seeAlso": [ - "https://www.gnu.org/software/classpath/license.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/LGPL-2.0.html", - "isDeprecatedLicenseId": true, - "detailsUrl": "https://spdx.org/licenses/LGPL-2.0.json", - "referenceNumber": 421, - "name": "GNU Library General Public License v2 only", - "licenseId": "LGPL-2.0", - "seeAlso": [ - "https://www.gnu.org/licenses/old-licenses/lgpl-2.0-standalone.html" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/BSD-2-Clause-Views.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/BSD-2-Clause-Views.json", - "referenceNumber": 422, - "name": "BSD 2-Clause with views sentence", - "licenseId": "BSD-2-Clause-Views", - "seeAlso": [ - "http://www.freebsd.org/copyright/freebsd-license.html", - "https://people.freebsd.org/~ivoras/wine/patch-wine-nvidia.sh", - "https://github.com/protegeproject/protege/blob/master/license.txt" - ], - "isOsiApproved": false - }, - { - "reference": 
"https://spdx.org/licenses/BSL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/BSL-1.0.json", - "referenceNumber": 423, - "name": "Boost Software License 1.0", - "licenseId": "BSL-1.0", - "seeAlso": [ - "http://www.boost.org/LICENSE_1_0.txt", - "https://opensource.org/licenses/BSL-1.0" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/CNRI-Jython.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CNRI-Jython.json", - "referenceNumber": 424, - "name": "CNRI Jython License", - "licenseId": "CNRI-Jython", - "seeAlso": [ - "http://www.jython.org/license.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Eurosym.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Eurosym.json", - "referenceNumber": 425, - "name": "Eurosym License", - "licenseId": "Eurosym", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Eurosym" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CC-BY-SA-3.0-AT.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-SA-3.0-AT.json", - "referenceNumber": 426, - "name": "Creative Commons Attribution Share Alike 3.0 Austria", - "licenseId": "CC-BY-SA-3.0-AT", - "seeAlso": [ - "https://creativecommons.org/licenses/by-sa/3.0/at/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CECILL-C.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CECILL-C.json", - "referenceNumber": 427, - "name": "CeCILL-C Free Software License Agreement", - "licenseId": "CECILL-C", - "seeAlso": [ - "http://www.cecill.info/licences/Licence_CeCILL-C_V1-en.html" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/EPICS.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/EPICS.json", - "referenceNumber": 428, - "name": "EPICS Open License", - "licenseId": "EPICS", - "seeAlso": [ - "https://epics.anl.gov/license/open.php" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CC-BY-NC-ND-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-ND-2.0.json", - "referenceNumber": 429, - "name": "Creative Commons Attribution Non Commercial No Derivatives 2.0 Generic", - "licenseId": "CC-BY-NC-ND-2.0", - "seeAlso": [ - "https://creativecommons.org/licenses/by-nc-nd/2.0/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/GD.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/GD.json", - "referenceNumber": 430, - "name": "GD License", - "licenseId": "GD", - "seeAlso": [ - "https://libgd.github.io/manuals/2.3.0/files/license-txt.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/X11.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/X11.json", - "referenceNumber": 431, - "name": "X11 License", - "licenseId": "X11", - "seeAlso": [ - "http://www.xfree86.org/3.3.6/COPYRIGHT2.html#3" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/MPL-1.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/MPL-1.1.json", - "referenceNumber": 432, - "name": "Mozilla Public License 1.1", - "licenseId": "MPL-1.1", - "seeAlso": [ - 
"http://www.mozilla.org/MPL/MPL-1.1.html", - "https://opensource.org/licenses/MPL-1.1" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/GFDL-1.1-invariants-only.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/GFDL-1.1-invariants-only.json", - "referenceNumber": 433, - "name": "GNU Free Documentation License v1.1 only - invariants", - "licenseId": "GFDL-1.1-invariants-only", - "seeAlso": [ - "https://www.gnu.org/licenses/old-licenses/fdl-1.1.txt" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/psfrag.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/psfrag.json", - "referenceNumber": 434, - "name": "psfrag License", - "licenseId": "psfrag", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/psfrag" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/RSCPL.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/RSCPL.json", - "referenceNumber": 435, - "name": "Ricoh Source Code Public License", - "licenseId": "RSCPL", - "seeAlso": [ - "http://wayback.archive.org/web/20060715140826/http://www.risource.org/RPL/RPL-1.0A.shtml", - "https://opensource.org/licenses/RSCPL" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/YPL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/YPL-1.0.json", - "referenceNumber": 436, - "name": "Yahoo! Public License v1.0", - "licenseId": "YPL-1.0", - "seeAlso": [ - "http://www.zimbra.com/license/yahoo_public_license_1.0.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/SGI-B-1.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/SGI-B-1.1.json", - "referenceNumber": 437, - "name": "SGI Free Software License B v1.1", - "licenseId": "SGI-B-1.1", - "seeAlso": [ - "http://oss.sgi.com/projects/FreeB/" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CC-BY-ND-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-ND-1.0.json", - "referenceNumber": 438, - "name": "Creative Commons Attribution No Derivatives 1.0 Generic", - "licenseId": "CC-BY-ND-1.0", - "seeAlso": [ - "https://creativecommons.org/licenses/by-nd/1.0/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/SGI-B-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/SGI-B-2.0.json", - "referenceNumber": 439, - "name": "SGI Free Software License B v2.0", - "licenseId": "SGI-B-2.0", - "seeAlso": [ - "http://oss.sgi.com/projects/FreeB/SGIFreeSWLicB.2.0.pdf" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/APAFML.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/APAFML.json", - "referenceNumber": 440, - "name": "Adobe Postscript AFM License", - "licenseId": "APAFML", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/AdobePostscriptAFM" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Spencer-94.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Spencer-94.json", - "referenceNumber": 441, - "name": "Spencer License 94", - "licenseId": "Spencer-94", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/Henry_Spencer_Reg-Ex_Library_License" - ], - "isOsiApproved": 
false - }, - { - "reference": "https://spdx.org/licenses/ISC.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/ISC.json", - "referenceNumber": 442, - "name": "ISC License", - "licenseId": "ISC", - "seeAlso": [ - "https://www.isc.org/downloads/software-support-policy/isc-license/", - "https://opensource.org/licenses/ISC" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/MIT-advertising.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/MIT-advertising.json", - "referenceNumber": 443, - "name": "Enlightenment License (e16)", - "licenseId": "MIT-advertising", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/MIT_With_Advertising" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/GFDL-1.2-invariants-or-later.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/GFDL-1.2-invariants-or-later.json", - "referenceNumber": 444, - "name": "GNU Free Documentation License v1.2 or later - invariants", - "licenseId": "GFDL-1.2-invariants-or-later", - "seeAlso": [ - "https://www.gnu.org/licenses/old-licenses/fdl-1.2.txt" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CC-BY-NC-SA-2.5.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-SA-2.5.json", - "referenceNumber": 445, - "name": "Creative Commons Attribution Non Commercial Share Alike 2.5 Generic", - "licenseId": "CC-BY-NC-SA-2.5", - "seeAlso": [ - "https://creativecommons.org/licenses/by-nc-sa/2.5/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/CC-BY-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CC-BY-1.0.json", - "referenceNumber": 446, - "name": "Creative Commons Attribution 1.0 Generic", - "licenseId": "CC-BY-1.0", - "seeAlso": [ - "https://creativecommons.org/licenses/by/1.0/legalcode" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/OSL-2.1.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OSL-2.1.json", - "referenceNumber": 447, - "name": "Open Software License 2.1", - "licenseId": "OSL-2.1", - "seeAlso": [ - "http://web.archive.org/web/20050212003940/http://www.rosenlaw.com/osl21.htm", - "https://opensource.org/licenses/OSL-2.1" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/CrystalStacker.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CrystalStacker.json", - "referenceNumber": 448, - "name": "CrystalStacker License", - "licenseId": "CrystalStacker", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing:CrystalStacker?rd\u003dLicensing/CrystalStacker" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/FSFULLR.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/FSFULLR.json", - "referenceNumber": 449, - "name": "FSF Unlimited License (with License Retention)", - "licenseId": "FSFULLR", - "seeAlso": [ - "https://fedoraproject.org/wiki/Licensing/FSF_Unlimited_License#License_Retention_Variant" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/libselinux-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/libselinux-1.0.json", - "referenceNumber": 450, - "name": "libselinux public domain notice", - "licenseId": 
"libselinux-1.0", - "seeAlso": [ - "https://github.com/SELinuxProject/selinux/blob/master/libselinux/LICENSE" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/MulanPSL-2.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/MulanPSL-2.0.json", - "referenceNumber": 451, - "name": "Mulan Permissive Software License, Version 2", - "licenseId": "MulanPSL-2.0", - "seeAlso": [ - "https://license.coscl.org.cn/MulanPSL2/" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/LGPL-3.0.html", - "isDeprecatedLicenseId": true, - "detailsUrl": "https://spdx.org/licenses/LGPL-3.0.json", - "referenceNumber": 452, - "name": "GNU Lesser General Public License v3.0 only", - "licenseId": "LGPL-3.0", - "seeAlso": [ - "https://www.gnu.org/licenses/lgpl-3.0-standalone.html", - "https://opensource.org/licenses/LGPL-3.0" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/OLDAP-2.5.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/OLDAP-2.5.json", - "referenceNumber": 453, - "name": "Open LDAP Public License v2.5", - "licenseId": "OLDAP-2.5", - "seeAlso": [ - "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003d6852b9d90022e8593c98205413380536b1b5a7cf" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/Artistic-1.0-Perl.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Artistic-1.0-Perl.json", - "referenceNumber": 454, - "name": "Artistic License 1.0 (Perl)", - "licenseId": "Artistic-1.0-Perl", - "seeAlso": [ - "http://dev.perl.org/licenses/artistic.html" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/AFL-1.2.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/AFL-1.2.json", - "referenceNumber": 455, - "name": "Academic Free License v1.2", - "licenseId": "AFL-1.2", - "seeAlso": [ - "http://opensource.linux-mirror.org/licenses/afl-1.2.txt", - "http://wayback.archive.org/web/20021204204652/http://www.opensource.org/licenses/academic.php" - ], - "isOsiApproved": true, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/CAL-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/CAL-1.0.json", - "referenceNumber": 456, - "name": "Cryptographic Autonomy License 1.0", - "licenseId": "CAL-1.0", - "seeAlso": [ - "http://cryptographicautonomylicense.com/license-text.html", - "https://opensource.org/licenses/CAL-1.0" - ], - "isOsiApproved": true - }, - { - "reference": "https://spdx.org/licenses/BSD-4-Clause.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/BSD-4-Clause.json", - "referenceNumber": 457, - "name": "BSD 4-Clause \"Original\" or \"Old\" License", - "licenseId": "BSD-4-Clause", - "seeAlso": [ - "http://directory.fsf.org/wiki/License:BSD_4Clause" - ], - "isOsiApproved": false, - "isFsfLibre": true - }, - { - "reference": "https://spdx.org/licenses/Interbase-1.0.html", - "isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/Interbase-1.0.json", - "referenceNumber": 458, - "name": "Interbase Public License v1.0", - "licenseId": "Interbase-1.0", - "seeAlso": [ - "https://web.archive.org/web/20060319014854/http://info.borland.com/devsupport/interbase/opensource/IPL.html" - ], - "isOsiApproved": false - }, - { - "reference": "https://spdx.org/licenses/NPOSL-3.0.html", - 
"isDeprecatedLicenseId": false, - "detailsUrl": "https://spdx.org/licenses/NPOSL-3.0.json", - "referenceNumber": 459, - "name": "Non-Profit Open Software License 3.0", - "licenseId": "NPOSL-3.0", - "seeAlso": [ - "https://opensource.org/licenses/NOSL3.0" - ], - "isOsiApproved": true - } - ], - "releaseDate": "2021-05-20" +{ + "licenseListVersion": "3.21", + "licenses": [ + { + "reference": "https://spdx.org/licenses/0BSD.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/0BSD.json", + "referenceNumber": 534, + "name": "BSD Zero Clause License", + "licenseId": "0BSD", + "seeAlso": [ + "http://landley.net/toybox/license.html", + "https://opensource.org/licenses/0BSD" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/AAL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/AAL.json", + "referenceNumber": 152, + "name": "Attribution Assurance License", + "licenseId": "AAL", + "seeAlso": [ + "https://opensource.org/licenses/attribution" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/Abstyles.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Abstyles.json", + "referenceNumber": 225, + "name": "Abstyles License", + "licenseId": "Abstyles", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Abstyles" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/AdaCore-doc.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/AdaCore-doc.json", + "referenceNumber": 396, + "name": "AdaCore Doc License", + "licenseId": "AdaCore-doc", + "seeAlso": [ + "https://github.com/AdaCore/xmlada/blob/master/docs/index.rst", + "https://github.com/AdaCore/gnatcoll-core/blob/master/docs/index.rst", + "https://github.com/AdaCore/gnatcoll-db/blob/master/docs/index.rst" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Adobe-2006.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Adobe-2006.json", + "referenceNumber": 106, + "name": "Adobe Systems Incorporated Source Code License Agreement", + "licenseId": "Adobe-2006", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/AdobeLicense" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Adobe-Glyph.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Adobe-Glyph.json", + "referenceNumber": 92, + "name": "Adobe Glyph List License", + "licenseId": "Adobe-Glyph", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/MIT#AdobeGlyph" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/ADSL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/ADSL.json", + "referenceNumber": 73, + "name": "Amazon Digital Services License", + "licenseId": "ADSL", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/AmazonDigitalServicesLicense" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/AFL-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/AFL-1.1.json", + "referenceNumber": 463, + "name": "Academic Free License v1.1", + "licenseId": "AFL-1.1", + "seeAlso": [ + "http://opensource.linux-mirror.org/licenses/afl-1.1.txt", + "http://wayback.archive.org/web/20021004124254/http://www.opensource.org/licenses/academic.php" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": 
"https://spdx.org/licenses/AFL-1.2.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/AFL-1.2.json", + "referenceNumber": 306, + "name": "Academic Free License v1.2", + "licenseId": "AFL-1.2", + "seeAlso": [ + "http://opensource.linux-mirror.org/licenses/afl-1.2.txt", + "http://wayback.archive.org/web/20021204204652/http://www.opensource.org/licenses/academic.php" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/AFL-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/AFL-2.0.json", + "referenceNumber": 154, + "name": "Academic Free License v2.0", + "licenseId": "AFL-2.0", + "seeAlso": [ + "http://wayback.archive.org/web/20060924134533/http://www.opensource.org/licenses/afl-2.0.txt" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/AFL-2.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/AFL-2.1.json", + "referenceNumber": 305, + "name": "Academic Free License v2.1", + "licenseId": "AFL-2.1", + "seeAlso": [ + "http://opensource.linux-mirror.org/licenses/afl-2.1.txt" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/AFL-3.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/AFL-3.0.json", + "referenceNumber": 502, + "name": "Academic Free License v3.0", + "licenseId": "AFL-3.0", + "seeAlso": [ + "http://www.rosenlaw.com/AFL3.0.htm", + "https://opensource.org/licenses/afl-3.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Afmparse.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Afmparse.json", + "referenceNumber": 111, + "name": "Afmparse License", + "licenseId": "Afmparse", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Afmparse" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/AGPL-1.0.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/AGPL-1.0.json", + "referenceNumber": 256, + "name": "Affero General Public License v1.0", + "licenseId": "AGPL-1.0", + "seeAlso": [ + "http://www.affero.org/oagpl.html" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/AGPL-1.0-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/AGPL-1.0-only.json", + "referenceNumber": 389, + "name": "Affero General Public License v1.0 only", + "licenseId": "AGPL-1.0-only", + "seeAlso": [ + "http://www.affero.org/oagpl.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/AGPL-1.0-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/AGPL-1.0-or-later.json", + "referenceNumber": 35, + "name": "Affero General Public License v1.0 or later", + "licenseId": "AGPL-1.0-or-later", + "seeAlso": [ + "http://www.affero.org/oagpl.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/AGPL-3.0.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/AGPL-3.0.json", + "referenceNumber": 232, + "name": "GNU Affero General Public License v3.0", + "licenseId": "AGPL-3.0", + "seeAlso": [ + "https://www.gnu.org/licenses/agpl.txt", + "https://opensource.org/licenses/AGPL-3.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": 
"https://spdx.org/licenses/AGPL-3.0-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/AGPL-3.0-only.json", + "referenceNumber": 34, + "name": "GNU Affero General Public License v3.0 only", + "licenseId": "AGPL-3.0-only", + "seeAlso": [ + "https://www.gnu.org/licenses/agpl.txt", + "https://opensource.org/licenses/AGPL-3.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/AGPL-3.0-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/AGPL-3.0-or-later.json", + "referenceNumber": 217, + "name": "GNU Affero General Public License v3.0 or later", + "licenseId": "AGPL-3.0-or-later", + "seeAlso": [ + "https://www.gnu.org/licenses/agpl.txt", + "https://opensource.org/licenses/AGPL-3.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Aladdin.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Aladdin.json", + "referenceNumber": 63, + "name": "Aladdin Free Public License", + "licenseId": "Aladdin", + "seeAlso": [ + "http://pages.cs.wisc.edu/~ghost/doc/AFPL/6.01/Public.htm" + ], + "isOsiApproved": false, + "isFsfLibre": false + }, + { + "reference": "https://spdx.org/licenses/AMDPLPA.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/AMDPLPA.json", + "referenceNumber": 386, + "name": "AMD\u0027s plpa_map.c License", + "licenseId": "AMDPLPA", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/AMD_plpa_map_License" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/AML.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/AML.json", + "referenceNumber": 147, + "name": "Apple MIT License", + "licenseId": "AML", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Apple_MIT_License" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/AMPAS.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/AMPAS.json", + "referenceNumber": 90, + "name": "Academy of Motion Picture Arts and Sciences BSD", + "licenseId": "AMPAS", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/BSD#AMPASBSD" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/ANTLR-PD.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/ANTLR-PD.json", + "referenceNumber": 448, + "name": "ANTLR Software Rights Notice", + "licenseId": "ANTLR-PD", + "seeAlso": [ + "http://www.antlr2.org/license.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/ANTLR-PD-fallback.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/ANTLR-PD-fallback.json", + "referenceNumber": 201, + "name": "ANTLR Software Rights Notice with license fallback", + "licenseId": "ANTLR-PD-fallback", + "seeAlso": [ + "http://www.antlr2.org/license.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Apache-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Apache-1.0.json", + "referenceNumber": 434, + "name": "Apache License 1.0", + "licenseId": "Apache-1.0", + "seeAlso": [ + "http://www.apache.org/licenses/LICENSE-1.0" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Apache-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": 
"https://spdx.org/licenses/Apache-1.1.json", + "referenceNumber": 524, + "name": "Apache License 1.1", + "licenseId": "Apache-1.1", + "seeAlso": [ + "http://apache.org/licenses/LICENSE-1.1", + "https://opensource.org/licenses/Apache-1.1" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Apache-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Apache-2.0.json", + "referenceNumber": 264, + "name": "Apache License 2.0", + "licenseId": "Apache-2.0", + "seeAlso": [ + "https://www.apache.org/licenses/LICENSE-2.0", + "https://opensource.org/licenses/Apache-2.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/APAFML.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/APAFML.json", + "referenceNumber": 184, + "name": "Adobe Postscript AFM License", + "licenseId": "APAFML", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/AdobePostscriptAFM" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/APL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/APL-1.0.json", + "referenceNumber": 410, + "name": "Adaptive Public License 1.0", + "licenseId": "APL-1.0", + "seeAlso": [ + "https://opensource.org/licenses/APL-1.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/App-s2p.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/App-s2p.json", + "referenceNumber": 150, + "name": "App::s2p License", + "licenseId": "App-s2p", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/App-s2p" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/APSL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/APSL-1.0.json", + "referenceNumber": 177, + "name": "Apple Public Source License 1.0", + "licenseId": "APSL-1.0", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Apple_Public_Source_License_1.0" + ], + "isOsiApproved": true, + "isFsfLibre": false + }, + { + "reference": "https://spdx.org/licenses/APSL-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/APSL-1.1.json", + "referenceNumber": 536, + "name": "Apple Public Source License 1.1", + "licenseId": "APSL-1.1", + "seeAlso": [ + "http://www.opensource.apple.com/source/IOSerialFamily/IOSerialFamily-7/APPLE_LICENSE" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/APSL-1.2.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/APSL-1.2.json", + "referenceNumber": 479, + "name": "Apple Public Source License 1.2", + "licenseId": "APSL-1.2", + "seeAlso": [ + "http://www.samurajdata.se/opensource/mirror/licenses/apsl.php" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/APSL-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/APSL-2.0.json", + "referenceNumber": 183, + "name": "Apple Public Source License 2.0", + "licenseId": "APSL-2.0", + "seeAlso": [ + "http://www.opensource.apple.com/license/apsl/" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Arphic-1999.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Arphic-1999.json", + "referenceNumber": 78, + "name": "Arphic Public License", + "licenseId": "Arphic-1999", + "seeAlso": [ + 
"http://ftp.gnu.org/gnu/non-gnu/chinese-fonts-truetype/LICENSE" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Artistic-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Artistic-1.0.json", + "referenceNumber": 282, + "name": "Artistic License 1.0", + "licenseId": "Artistic-1.0", + "seeAlso": [ + "https://opensource.org/licenses/Artistic-1.0" + ], + "isOsiApproved": true, + "isFsfLibre": false + }, + { + "reference": "https://spdx.org/licenses/Artistic-1.0-cl8.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Artistic-1.0-cl8.json", + "referenceNumber": 210, + "name": "Artistic License 1.0 w/clause 8", + "licenseId": "Artistic-1.0-cl8", + "seeAlso": [ + "https://opensource.org/licenses/Artistic-1.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/Artistic-1.0-Perl.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Artistic-1.0-Perl.json", + "referenceNumber": 550, + "name": "Artistic License 1.0 (Perl)", + "licenseId": "Artistic-1.0-Perl", + "seeAlso": [ + "http://dev.perl.org/licenses/artistic.html" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/Artistic-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Artistic-2.0.json", + "referenceNumber": 148, + "name": "Artistic License 2.0", + "licenseId": "Artistic-2.0", + "seeAlso": [ + "http://www.perlfoundation.org/artistic_license_2_0", + "https://www.perlfoundation.org/artistic-license-20.html", + "https://opensource.org/licenses/artistic-license-2.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/ASWF-Digital-Assets-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/ASWF-Digital-Assets-1.0.json", + "referenceNumber": 277, + "name": "ASWF Digital Assets License version 1.0", + "licenseId": "ASWF-Digital-Assets-1.0", + "seeAlso": [ + "https://github.com/AcademySoftwareFoundation/foundation/blob/main/digital_assets/aswf_digital_assets_license_v1.0.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/ASWF-Digital-Assets-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/ASWF-Digital-Assets-1.1.json", + "referenceNumber": 266, + "name": "ASWF Digital Assets License 1.1", + "licenseId": "ASWF-Digital-Assets-1.1", + "seeAlso": [ + "https://github.com/AcademySoftwareFoundation/foundation/blob/main/digital_assets/aswf_digital_assets_license_v1.1.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Baekmuk.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Baekmuk.json", + "referenceNumber": 76, + "name": "Baekmuk License", + "licenseId": "Baekmuk", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing:Baekmuk?rd\u003dLicensing/Baekmuk" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Bahyph.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Bahyph.json", + "referenceNumber": 4, + "name": "Bahyph License", + "licenseId": "Bahyph", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Bahyph" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Barr.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Barr.json", + "referenceNumber": 401, + "name": "Barr 
License", + "licenseId": "Barr", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Barr" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Beerware.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Beerware.json", + "referenceNumber": 487, + "name": "Beerware License", + "licenseId": "Beerware", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Beerware", + "https://people.freebsd.org/~phk/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Bitstream-Charter.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Bitstream-Charter.json", + "referenceNumber": 175, + "name": "Bitstream Charter Font License", + "licenseId": "Bitstream-Charter", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Charter#License_Text", + "https://raw.githubusercontent.com/blackhole89/notekit/master/data/fonts/Charter%20license.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Bitstream-Vera.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Bitstream-Vera.json", + "referenceNumber": 505, + "name": "Bitstream Vera Font License", + "licenseId": "Bitstream-Vera", + "seeAlso": [ + "https://web.archive.org/web/20080207013128/http://www.gnome.org/fonts/", + "https://docubrain.com/sites/default/files/licenses/bitstream-vera.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/BitTorrent-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BitTorrent-1.0.json", + "referenceNumber": 500, + "name": "BitTorrent Open Source License v1.0", + "licenseId": "BitTorrent-1.0", + "seeAlso": [ + "http://sources.gentoo.org/cgi-bin/viewvc.cgi/gentoo-x86/licenses/BitTorrent?r1\u003d1.1\u0026r2\u003d1.1.1.1\u0026diff_format\u003ds" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/BitTorrent-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BitTorrent-1.1.json", + "referenceNumber": 77, + "name": "BitTorrent Open Source License v1.1", + "licenseId": "BitTorrent-1.1", + "seeAlso": [ + "http://directory.fsf.org/wiki/License:BitTorrentOSL1.1" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/blessing.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/blessing.json", + "referenceNumber": 444, + "name": "SQLite Blessing", + "licenseId": "blessing", + "seeAlso": [ + "https://www.sqlite.org/src/artifact/e33a4df7e32d742a?ln\u003d4-9", + "https://sqlite.org/src/artifact/df5091916dbb40e6" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/BlueOak-1.0.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BlueOak-1.0.0.json", + "referenceNumber": 428, + "name": "Blue Oak Model License 1.0.0", + "licenseId": "BlueOak-1.0.0", + "seeAlso": [ + "https://blueoakcouncil.org/license/1.0.0" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Boehm-GC.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Boehm-GC.json", + "referenceNumber": 314, + "name": "Boehm-Demers-Weiser GC License", + "licenseId": "Boehm-GC", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing:MIT#Another_Minimal_variant_(found_in_libatomic_ops)", + "https://github.com/uim/libgcroots/blob/master/COPYING", + 
"https://github.com/ivmai/libatomic_ops/blob/master/LICENSE" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Borceux.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Borceux.json", + "referenceNumber": 327, + "name": "Borceux license", + "licenseId": "Borceux", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Borceux" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Brian-Gladman-3-Clause.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Brian-Gladman-3-Clause.json", + "referenceNumber": 131, + "name": "Brian Gladman 3-Clause License", + "licenseId": "Brian-Gladman-3-Clause", + "seeAlso": [ + "https://github.com/SWI-Prolog/packages-clib/blob/master/sha1/brg_endian.h" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/BSD-1-Clause.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-1-Clause.json", + "referenceNumber": 200, + "name": "BSD 1-Clause License", + "licenseId": "BSD-1-Clause", + "seeAlso": [ + "https://svnweb.freebsd.org/base/head/include/ifaddrs.h?revision\u003d326823" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/BSD-2-Clause.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-2-Clause.json", + "referenceNumber": 269, + "name": "BSD 2-Clause \"Simplified\" License", + "licenseId": "BSD-2-Clause", + "seeAlso": [ + "https://opensource.org/licenses/BSD-2-Clause" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/BSD-2-Clause-FreeBSD.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/BSD-2-Clause-FreeBSD.json", + "referenceNumber": 22, + "name": "BSD 2-Clause FreeBSD License", + "licenseId": "BSD-2-Clause-FreeBSD", + "seeAlso": [ + "http://www.freebsd.org/copyright/freebsd-license.html" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/BSD-2-Clause-NetBSD.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/BSD-2-Clause-NetBSD.json", + "referenceNumber": 365, + "name": "BSD 2-Clause NetBSD License", + "licenseId": "BSD-2-Clause-NetBSD", + "seeAlso": [ + "http://www.netbsd.org/about/redistribution.html#default" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/BSD-2-Clause-Patent.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-2-Clause-Patent.json", + "referenceNumber": 494, + "name": "BSD-2-Clause Plus Patent License", + "licenseId": "BSD-2-Clause-Patent", + "seeAlso": [ + "https://opensource.org/licenses/BSDplusPatent" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/BSD-2-Clause-Views.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-2-Clause-Views.json", + "referenceNumber": 552, + "name": "BSD 2-Clause with views sentence", + "licenseId": "BSD-2-Clause-Views", + "seeAlso": [ + "http://www.freebsd.org/copyright/freebsd-license.html", + "https://people.freebsd.org/~ivoras/wine/patch-wine-nvidia.sh", + "https://github.com/protegeproject/protege/blob/master/license.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/BSD-3-Clause.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-3-Clause.json", + "referenceNumber": 
+      "referenceNumber": 320,
+      "name": "BSD 3-Clause \"New\" or \"Revised\" License",
+      "licenseId": "BSD-3-Clause",
+      "seeAlso": [
+        "https://opensource.org/licenses/BSD-3-Clause",
+        "https://www.eclipse.org/org/documents/edl-v10.php"
+      ],
+      "isOsiApproved": true,
+      "isFsfLibre": true
+    },
+    {
+      "reference": "https://spdx.org/licenses/BSD-3-Clause-Attribution.html",
+      "isDeprecatedLicenseId": false,
+      "detailsUrl": "https://spdx.org/licenses/BSD-3-Clause-Attribution.json",
+      "referenceNumber": 195,
+      "name": "BSD with attribution",
+      "licenseId": "BSD-3-Clause-Attribution",
+      "seeAlso": [
+        "https://fedoraproject.org/wiki/Licensing/BSD_with_Attribution"
+      ],
+      "isOsiApproved": false
+    },
+    {
+      "reference": "https://spdx.org/licenses/BSD-3-Clause-Clear.html",
+      "isDeprecatedLicenseId": false,
+      "detailsUrl": "https://spdx.org/licenses/BSD-3-Clause-Clear.json",
+      "referenceNumber": 233,
+      "name": "BSD 3-Clause Clear License",
+      "licenseId": "BSD-3-Clause-Clear",
+      "seeAlso": [
+        "http://labs.metacarta.com/license-explanation.html#license"
+      ],
+      "isOsiApproved": false,
+      "isFsfLibre": true
+    },
+    {
+      "reference": "https://spdx.org/licenses/BSD-3-Clause-LBNL.html",
+      "isDeprecatedLicenseId": false,
+      "detailsUrl": "https://spdx.org/licenses/BSD-3-Clause-LBNL.json",
+      "referenceNumber": 45,
+      "name": "Lawrence Berkeley National Labs BSD variant license",
+      "licenseId": "BSD-3-Clause-LBNL",
+      "seeAlso": [
+        "https://fedoraproject.org/wiki/Licensing/LBNLBSD"
+      ],
+      "isOsiApproved": true
+    },
+    {
+      "reference": "https://spdx.org/licenses/BSD-3-Clause-Modification.html",
+      "isDeprecatedLicenseId": false,
+      "detailsUrl": "https://spdx.org/licenses/BSD-3-Clause-Modification.json",
+      "referenceNumber": 202,
+      "name": "BSD 3-Clause Modification",
+      "licenseId": "BSD-3-Clause-Modification",
+      "seeAlso": [
+        "https://fedoraproject.org/wiki/Licensing:BSD#Modification_Variant"
+      ],
+      "isOsiApproved": false
+    },
+    {
+      "reference": "https://spdx.org/licenses/BSD-3-Clause-No-Military-License.html",
+      "isDeprecatedLicenseId": false,
+      "detailsUrl": "https://spdx.org/licenses/BSD-3-Clause-No-Military-License.json",
+      "referenceNumber": 341,
+      "name": "BSD 3-Clause No Military License",
+      "licenseId": "BSD-3-Clause-No-Military-License",
+      "seeAlso": [
+        "https://gitlab.syncad.com/hive/dhive/-/blob/master/LICENSE",
+        "https://github.com/greymass/swift-eosio/blob/master/LICENSE"
+      ],
+      "isOsiApproved": false
+    },
+    {
+      "reference": "https://spdx.org/licenses/BSD-3-Clause-No-Nuclear-License.html",
+      "isDeprecatedLicenseId": false,
+      "detailsUrl": "https://spdx.org/licenses/BSD-3-Clause-No-Nuclear-License.json",
+      "referenceNumber": 331,
+      "name": "BSD 3-Clause No Nuclear License",
+      "licenseId": "BSD-3-Clause-No-Nuclear-License",
+      "seeAlso": [
+        "http://download.oracle.com/otn-pub/java/licenses/bsd.txt?AuthParam\u003d1467140197_43d516ce1776bd08a58235a7785be1cc"
+      ],
+      "isOsiApproved": false
+    },
+    {
+      "reference": "https://spdx.org/licenses/BSD-3-Clause-No-Nuclear-License-2014.html",
+      "isDeprecatedLicenseId": false,
+      "detailsUrl": "https://spdx.org/licenses/BSD-3-Clause-No-Nuclear-License-2014.json",
+      "referenceNumber": 442,
+      "name": "BSD 3-Clause No Nuclear License 2014",
+      "licenseId": "BSD-3-Clause-No-Nuclear-License-2014",
+      "seeAlso": [
+        "https://java.net/projects/javaeetutorial/pages/BerkeleyLicense"
+      ],
+      "isOsiApproved": false
+    },
+    {
+      "reference": "https://spdx.org/licenses/BSD-3-Clause-No-Nuclear-Warranty.html",
+      "isDeprecatedLicenseId": false,
"https://spdx.org/licenses/BSD-3-Clause-No-Nuclear-Warranty.json", + "referenceNumber": 79, + "name": "BSD 3-Clause No Nuclear Warranty", + "licenseId": "BSD-3-Clause-No-Nuclear-Warranty", + "seeAlso": [ + "https://jogamp.org/git/?p\u003dgluegen.git;a\u003dblob_plain;f\u003dLICENSE.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/BSD-3-Clause-Open-MPI.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-3-Clause-Open-MPI.json", + "referenceNumber": 483, + "name": "BSD 3-Clause Open MPI variant", + "licenseId": "BSD-3-Clause-Open-MPI", + "seeAlso": [ + "https://www.open-mpi.org/community/license.php", + "http://www.netlib.org/lapack/LICENSE.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/BSD-4-Clause.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-4-Clause.json", + "referenceNumber": 471, + "name": "BSD 4-Clause \"Original\" or \"Old\" License", + "licenseId": "BSD-4-Clause", + "seeAlso": [ + "http://directory.fsf.org/wiki/License:BSD_4Clause" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/BSD-4-Clause-Shortened.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-4-Clause-Shortened.json", + "referenceNumber": 41, + "name": "BSD 4 Clause Shortened", + "licenseId": "BSD-4-Clause-Shortened", + "seeAlso": [ + "https://metadata.ftp-master.debian.org/changelogs//main/a/arpwatch/arpwatch_2.1a15-7_copyright" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/BSD-4-Clause-UC.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-4-Clause-UC.json", + "referenceNumber": 160, + "name": "BSD-4-Clause (University of California-Specific)", + "licenseId": "BSD-4-Clause-UC", + "seeAlso": [ + "http://www.freebsd.org/copyright/license.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/BSD-4.3RENO.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-4.3RENO.json", + "referenceNumber": 130, + "name": "BSD 4.3 RENO License", + "licenseId": "BSD-4.3RENO", + "seeAlso": [ + "https://sourceware.org/git/?p\u003dbinutils-gdb.git;a\u003dblob;f\u003dlibiberty/strcasecmp.c;h\u003d131d81c2ce7881fa48c363dc5bf5fb302c61ce0b;hb\u003dHEAD" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/BSD-4.3TAHOE.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-4.3TAHOE.json", + "referenceNumber": 507, + "name": "BSD 4.3 TAHOE License", + "licenseId": "BSD-4.3TAHOE", + "seeAlso": [ + "https://github.com/389ds/389-ds-base/blob/main/ldap/include/sysexits-compat.h#L15", + "https://git.savannah.gnu.org/cgit/indent.git/tree/doc/indent.texi?id\u003da74c6b4ee49397cf330b333da1042bffa60ed14f#n1788" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/BSD-Advertising-Acknowledgement.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-Advertising-Acknowledgement.json", + "referenceNumber": 367, + "name": "BSD Advertising Acknowledgement License", + "licenseId": "BSD-Advertising-Acknowledgement", + "seeAlso": [ + "https://github.com/python-excel/xlrd/blob/master/LICENSE#L33" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/BSD-Attribution-HPND-disclaimer.html", + "isDeprecatedLicenseId": false, + "detailsUrl": 
"https://spdx.org/licenses/BSD-Attribution-HPND-disclaimer.json", + "referenceNumber": 280, + "name": "BSD with Attribution and HPND disclaimer", + "licenseId": "BSD-Attribution-HPND-disclaimer", + "seeAlso": [ + "https://github.com/cyrusimap/cyrus-sasl/blob/master/COPYING" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/BSD-Protection.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-Protection.json", + "referenceNumber": 126, + "name": "BSD Protection License", + "licenseId": "BSD-Protection", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/BSD_Protection_License" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/BSD-Source-Code.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-Source-Code.json", + "referenceNumber": 397, + "name": "BSD Source Code Attribution", + "licenseId": "BSD-Source-Code", + "seeAlso": [ + "https://github.com/robbiehanson/CocoaHTTPServer/blob/master/LICENSE.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/BSL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSL-1.0.json", + "referenceNumber": 467, + "name": "Boost Software License 1.0", + "licenseId": "BSL-1.0", + "seeAlso": [ + "http://www.boost.org/LICENSE_1_0.txt", + "https://opensource.org/licenses/BSL-1.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/BUSL-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BUSL-1.1.json", + "referenceNumber": 255, + "name": "Business Source License 1.1", + "licenseId": "BUSL-1.1", + "seeAlso": [ + "https://mariadb.com/bsl11/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/bzip2-1.0.5.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/bzip2-1.0.5.json", + "referenceNumber": 245, + "name": "bzip2 and libbzip2 License v1.0.5", + "licenseId": "bzip2-1.0.5", + "seeAlso": [ + "https://sourceware.org/bzip2/1.0.5/bzip2-manual-1.0.5.html", + "http://bzip.org/1.0.5/bzip2-manual-1.0.5.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/bzip2-1.0.6.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/bzip2-1.0.6.json", + "referenceNumber": 392, + "name": "bzip2 and libbzip2 License v1.0.6", + "licenseId": "bzip2-1.0.6", + "seeAlso": [ + "https://sourceware.org/git/?p\u003dbzip2.git;a\u003dblob;f\u003dLICENSE;hb\u003dbzip2-1.0.6", + "http://bzip.org/1.0.5/bzip2-manual-1.0.5.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/C-UDA-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/C-UDA-1.0.json", + "referenceNumber": 191, + "name": "Computational Use of Data Agreement v1.0", + "licenseId": "C-UDA-1.0", + "seeAlso": [ + "https://github.com/microsoft/Computational-Use-of-Data-Agreement/blob/master/C-UDA-1.0.md", + "https://cdla.dev/computational-use-of-data-agreement-v1-0/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CAL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CAL-1.0.json", + "referenceNumber": 551, + "name": "Cryptographic Autonomy License 1.0", + "licenseId": "CAL-1.0", + "seeAlso": [ + "http://cryptographicautonomylicense.com/license-text.html", + "https://opensource.org/licenses/CAL-1.0" 
+      ],
+      "isOsiApproved": true
+    },
+    {
+      "reference": "https://spdx.org/licenses/CAL-1.0-Combined-Work-Exception.html",
+      "isDeprecatedLicenseId": false,
+      "detailsUrl": "https://spdx.org/licenses/CAL-1.0-Combined-Work-Exception.json",
+      "referenceNumber": 316,
+      "name": "Cryptographic Autonomy License 1.0 (Combined Work Exception)",
+      "licenseId": "CAL-1.0-Combined-Work-Exception",
+      "seeAlso": [
+        "http://cryptographicautonomylicense.com/license-text.html",
+        "https://opensource.org/licenses/CAL-1.0"
+      ],
+      "isOsiApproved": true
+    },
+    {
+      "reference": "https://spdx.org/licenses/Caldera.html",
+      "isDeprecatedLicenseId": false,
+      "detailsUrl": "https://spdx.org/licenses/Caldera.json",
+      "referenceNumber": 178,
+      "name": "Caldera License",
+      "licenseId": "Caldera",
+      "seeAlso": [
+        "http://www.lemis.com/grog/UNIX/ancient-source-all.pdf"
+      ],
+      "isOsiApproved": false
+    },
+    {
+      "reference": "https://spdx.org/licenses/CATOSL-1.1.html",
+      "isDeprecatedLicenseId": false,
+      "detailsUrl": "https://spdx.org/licenses/CATOSL-1.1.json",
+      "referenceNumber": 253,
+      "name": "Computer Associates Trusted Open Source License 1.1",
+      "licenseId": "CATOSL-1.1",
+      "seeAlso": [
+        "https://opensource.org/licenses/CATOSL-1.1"
+      ],
+      "isOsiApproved": true
+    },
+    {
+      "reference": "https://spdx.org/licenses/CC-BY-1.0.html",
+      "isDeprecatedLicenseId": false,
+      "detailsUrl": "https://spdx.org/licenses/CC-BY-1.0.json",
+      "referenceNumber": 205,
+      "name": "Creative Commons Attribution 1.0 Generic",
+      "licenseId": "CC-BY-1.0",
+      "seeAlso": [
+        "https://creativecommons.org/licenses/by/1.0/legalcode"
+      ],
+      "isOsiApproved": false
+    },
+    {
+      "reference": "https://spdx.org/licenses/CC-BY-2.0.html",
+      "isDeprecatedLicenseId": false,
+      "detailsUrl": "https://spdx.org/licenses/CC-BY-2.0.json",
+      "referenceNumber": 61,
+      "name": "Creative Commons Attribution 2.0 Generic",
+      "licenseId": "CC-BY-2.0",
+      "seeAlso": [
+        "https://creativecommons.org/licenses/by/2.0/legalcode"
+      ],
+      "isOsiApproved": false
+    },
+    {
+      "reference": "https://spdx.org/licenses/CC-BY-2.5.html",
+      "isDeprecatedLicenseId": false,
+      "detailsUrl": "https://spdx.org/licenses/CC-BY-2.5.json",
+      "referenceNumber": 171,
+      "name": "Creative Commons Attribution 2.5 Generic",
+      "licenseId": "CC-BY-2.5",
+      "seeAlso": [
+        "https://creativecommons.org/licenses/by/2.5/legalcode"
+      ],
+      "isOsiApproved": false
+    },
+    {
+      "reference": "https://spdx.org/licenses/CC-BY-2.5-AU.html",
+      "isDeprecatedLicenseId": false,
+      "detailsUrl": "https://spdx.org/licenses/CC-BY-2.5-AU.json",
+      "referenceNumber": 128,
+      "name": "Creative Commons Attribution 2.5 Australia",
+      "licenseId": "CC-BY-2.5-AU",
+      "seeAlso": [
+        "https://creativecommons.org/licenses/by/2.5/au/legalcode"
+      ],
+      "isOsiApproved": false
+    },
+    {
+      "reference": "https://spdx.org/licenses/CC-BY-3.0.html",
+      "isDeprecatedLicenseId": false,
+      "detailsUrl": "https://spdx.org/licenses/CC-BY-3.0.json",
+      "referenceNumber": 433,
+      "name": "Creative Commons Attribution 3.0 Unported",
+      "licenseId": "CC-BY-3.0",
+      "seeAlso": [
+        "https://creativecommons.org/licenses/by/3.0/legalcode"
+      ],
+      "isOsiApproved": false
+    },
+    {
+      "reference": "https://spdx.org/licenses/CC-BY-3.0-AT.html",
+      "isDeprecatedLicenseId": false,
+      "detailsUrl": "https://spdx.org/licenses/CC-BY-3.0-AT.json",
+      "referenceNumber": 7,
+      "name": "Creative Commons Attribution 3.0 Austria",
+      "licenseId": "CC-BY-3.0-AT",
+      "seeAlso": [
+        "https://creativecommons.org/licenses/by/3.0/at/legalcode"
+      ],
+      "isOsiApproved": false
+    },
+    {
"https://spdx.org/licenses/CC-BY-3.0-DE.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-3.0-DE.json", + "referenceNumber": 317, + "name": "Creative Commons Attribution 3.0 Germany", + "licenseId": "CC-BY-3.0-DE", + "seeAlso": [ + "https://creativecommons.org/licenses/by/3.0/de/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-3.0-IGO.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-3.0-IGO.json", + "referenceNumber": 141, + "name": "Creative Commons Attribution 3.0 IGO", + "licenseId": "CC-BY-3.0-IGO", + "seeAlso": [ + "https://creativecommons.org/licenses/by/3.0/igo/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-3.0-NL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-3.0-NL.json", + "referenceNumber": 193, + "name": "Creative Commons Attribution 3.0 Netherlands", + "licenseId": "CC-BY-3.0-NL", + "seeAlso": [ + "https://creativecommons.org/licenses/by/3.0/nl/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-3.0-US.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-3.0-US.json", + "referenceNumber": 156, + "name": "Creative Commons Attribution 3.0 United States", + "licenseId": "CC-BY-3.0-US", + "seeAlso": [ + "https://creativecommons.org/licenses/by/3.0/us/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-4.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-4.0.json", + "referenceNumber": 499, + "name": "Creative Commons Attribution 4.0 International", + "licenseId": "CC-BY-4.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by/4.0/legalcode" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-1.0.json", + "referenceNumber": 292, + "name": "Creative Commons Attribution Non Commercial 1.0 Generic", + "licenseId": "CC-BY-NC-1.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc/1.0/legalcode" + ], + "isOsiApproved": false, + "isFsfLibre": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-2.0.json", + "referenceNumber": 143, + "name": "Creative Commons Attribution Non Commercial 2.0 Generic", + "licenseId": "CC-BY-NC-2.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc/2.0/legalcode" + ], + "isOsiApproved": false, + "isFsfLibre": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-2.5.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-2.5.json", + "referenceNumber": 457, + "name": "Creative Commons Attribution Non Commercial 2.5 Generic", + "licenseId": "CC-BY-NC-2.5", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc/2.5/legalcode" + ], + "isOsiApproved": false, + "isFsfLibre": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-3.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-3.0.json", + "referenceNumber": 216, + "name": "Creative Commons Attribution Non Commercial 3.0 Unported", + "licenseId": "CC-BY-NC-3.0", + "seeAlso": [ + 
"https://creativecommons.org/licenses/by-nc/3.0/legalcode" + ], + "isOsiApproved": false, + "isFsfLibre": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-3.0-DE.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-3.0-DE.json", + "referenceNumber": 196, + "name": "Creative Commons Attribution Non Commercial 3.0 Germany", + "licenseId": "CC-BY-NC-3.0-DE", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc/3.0/de/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-4.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-4.0.json", + "referenceNumber": 248, + "name": "Creative Commons Attribution Non Commercial 4.0 International", + "licenseId": "CC-BY-NC-4.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc/4.0/legalcode" + ], + "isOsiApproved": false, + "isFsfLibre": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-ND-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-ND-1.0.json", + "referenceNumber": 368, + "name": "Creative Commons Attribution Non Commercial No Derivatives 1.0 Generic", + "licenseId": "CC-BY-NC-ND-1.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nd-nc/1.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-ND-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-ND-2.0.json", + "referenceNumber": 462, + "name": "Creative Commons Attribution Non Commercial No Derivatives 2.0 Generic", + "licenseId": "CC-BY-NC-ND-2.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc-nd/2.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-ND-2.5.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-ND-2.5.json", + "referenceNumber": 464, + "name": "Creative Commons Attribution Non Commercial No Derivatives 2.5 Generic", + "licenseId": "CC-BY-NC-ND-2.5", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc-nd/2.5/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-ND-3.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-ND-3.0.json", + "referenceNumber": 478, + "name": "Creative Commons Attribution Non Commercial No Derivatives 3.0 Unported", + "licenseId": "CC-BY-NC-ND-3.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc-nd/3.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-ND-3.0-DE.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-ND-3.0-DE.json", + "referenceNumber": 384, + "name": "Creative Commons Attribution Non Commercial No Derivatives 3.0 Germany", + "licenseId": "CC-BY-NC-ND-3.0-DE", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc-nd/3.0/de/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-ND-3.0-IGO.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-ND-3.0-IGO.json", + "referenceNumber": 211, + "name": "Creative Commons Attribution Non Commercial No Derivatives 3.0 IGO", + "licenseId": "CC-BY-NC-ND-3.0-IGO", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc-nd/3.0/igo/legalcode" + ], + "isOsiApproved": false + }, + { + 
"reference": "https://spdx.org/licenses/CC-BY-NC-ND-4.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-ND-4.0.json", + "referenceNumber": 466, + "name": "Creative Commons Attribution Non Commercial No Derivatives 4.0 International", + "licenseId": "CC-BY-NC-ND-4.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc-nd/4.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-SA-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-SA-1.0.json", + "referenceNumber": 132, + "name": "Creative Commons Attribution Non Commercial Share Alike 1.0 Generic", + "licenseId": "CC-BY-NC-SA-1.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc-sa/1.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-SA-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-SA-2.0.json", + "referenceNumber": 420, + "name": "Creative Commons Attribution Non Commercial Share Alike 2.0 Generic", + "licenseId": "CC-BY-NC-SA-2.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc-sa/2.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-SA-2.0-DE.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-SA-2.0-DE.json", + "referenceNumber": 452, + "name": "Creative Commons Attribution Non Commercial Share Alike 2.0 Germany", + "licenseId": "CC-BY-NC-SA-2.0-DE", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc-sa/2.0/de/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-SA-2.0-FR.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-SA-2.0-FR.json", + "referenceNumber": 29, + "name": "Creative Commons Attribution-NonCommercial-ShareAlike 2.0 France", + "licenseId": "CC-BY-NC-SA-2.0-FR", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc-sa/2.0/fr/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-SA-2.0-UK.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-SA-2.0-UK.json", + "referenceNumber": 460, + "name": "Creative Commons Attribution Non Commercial Share Alike 2.0 England and Wales", + "licenseId": "CC-BY-NC-SA-2.0-UK", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc-sa/2.0/uk/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-SA-2.5.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-SA-2.5.json", + "referenceNumber": 8, + "name": "Creative Commons Attribution Non Commercial Share Alike 2.5 Generic", + "licenseId": "CC-BY-NC-SA-2.5", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc-sa/2.5/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-SA-3.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-SA-3.0.json", + "referenceNumber": 271, + "name": "Creative Commons Attribution Non Commercial Share Alike 3.0 Unported", + "licenseId": "CC-BY-NC-SA-3.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc-sa/3.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-SA-3.0-DE.html", + "isDeprecatedLicenseId": false, + 
"detailsUrl": "https://spdx.org/licenses/CC-BY-NC-SA-3.0-DE.json", + "referenceNumber": 504, + "name": "Creative Commons Attribution Non Commercial Share Alike 3.0 Germany", + "licenseId": "CC-BY-NC-SA-3.0-DE", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc-sa/3.0/de/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-SA-3.0-IGO.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-SA-3.0-IGO.json", + "referenceNumber": 14, + "name": "Creative Commons Attribution Non Commercial Share Alike 3.0 IGO", + "licenseId": "CC-BY-NC-SA-3.0-IGO", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc-sa/3.0/igo/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-SA-4.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-SA-4.0.json", + "referenceNumber": 338, + "name": "Creative Commons Attribution Non Commercial Share Alike 4.0 International", + "licenseId": "CC-BY-NC-SA-4.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc-sa/4.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-ND-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-ND-1.0.json", + "referenceNumber": 115, + "name": "Creative Commons Attribution No Derivatives 1.0 Generic", + "licenseId": "CC-BY-ND-1.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nd/1.0/legalcode" + ], + "isOsiApproved": false, + "isFsfLibre": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-ND-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-ND-2.0.json", + "referenceNumber": 116, + "name": "Creative Commons Attribution No Derivatives 2.0 Generic", + "licenseId": "CC-BY-ND-2.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nd/2.0/legalcode" + ], + "isOsiApproved": false, + "isFsfLibre": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-ND-2.5.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-ND-2.5.json", + "referenceNumber": 13, + "name": "Creative Commons Attribution No Derivatives 2.5 Generic", + "licenseId": "CC-BY-ND-2.5", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nd/2.5/legalcode" + ], + "isOsiApproved": false, + "isFsfLibre": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-ND-3.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-ND-3.0.json", + "referenceNumber": 31, + "name": "Creative Commons Attribution No Derivatives 3.0 Unported", + "licenseId": "CC-BY-ND-3.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nd/3.0/legalcode" + ], + "isOsiApproved": false, + "isFsfLibre": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-ND-3.0-DE.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-ND-3.0-DE.json", + "referenceNumber": 322, + "name": "Creative Commons Attribution No Derivatives 3.0 Germany", + "licenseId": "CC-BY-ND-3.0-DE", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nd/3.0/de/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-ND-4.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-ND-4.0.json", + "referenceNumber": 44, + "name": "Creative Commons Attribution No Derivatives 4.0 
International", + "licenseId": "CC-BY-ND-4.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nd/4.0/legalcode" + ], + "isOsiApproved": false, + "isFsfLibre": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-SA-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-SA-1.0.json", + "referenceNumber": 71, + "name": "Creative Commons Attribution Share Alike 1.0 Generic", + "licenseId": "CC-BY-SA-1.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-sa/1.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-SA-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-SA-2.0.json", + "referenceNumber": 252, + "name": "Creative Commons Attribution Share Alike 2.0 Generic", + "licenseId": "CC-BY-SA-2.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-sa/2.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-SA-2.0-UK.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-SA-2.0-UK.json", + "referenceNumber": 72, + "name": "Creative Commons Attribution Share Alike 2.0 England and Wales", + "licenseId": "CC-BY-SA-2.0-UK", + "seeAlso": [ + "https://creativecommons.org/licenses/by-sa/2.0/uk/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-SA-2.1-JP.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-SA-2.1-JP.json", + "referenceNumber": 54, + "name": "Creative Commons Attribution Share Alike 2.1 Japan", + "licenseId": "CC-BY-SA-2.1-JP", + "seeAlso": [ + "https://creativecommons.org/licenses/by-sa/2.1/jp/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-SA-2.5.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-SA-2.5.json", + "referenceNumber": 378, + "name": "Creative Commons Attribution Share Alike 2.5 Generic", + "licenseId": "CC-BY-SA-2.5", + "seeAlso": [ + "https://creativecommons.org/licenses/by-sa/2.5/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-SA-3.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-SA-3.0.json", + "referenceNumber": 139, + "name": "Creative Commons Attribution Share Alike 3.0 Unported", + "licenseId": "CC-BY-SA-3.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-sa/3.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-SA-3.0-AT.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-SA-3.0-AT.json", + "referenceNumber": 189, + "name": "Creative Commons Attribution Share Alike 3.0 Austria", + "licenseId": "CC-BY-SA-3.0-AT", + "seeAlso": [ + "https://creativecommons.org/licenses/by-sa/3.0/at/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-SA-3.0-DE.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-SA-3.0-DE.json", + "referenceNumber": 385, + "name": "Creative Commons Attribution Share Alike 3.0 Germany", + "licenseId": "CC-BY-SA-3.0-DE", + "seeAlso": [ + "https://creativecommons.org/licenses/by-sa/3.0/de/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-SA-3.0-IGO.html", + "isDeprecatedLicenseId": false, + "detailsUrl": 
"https://spdx.org/licenses/CC-BY-SA-3.0-IGO.json", + "referenceNumber": 213, + "name": "Creative Commons Attribution-ShareAlike 3.0 IGO", + "licenseId": "CC-BY-SA-3.0-IGO", + "seeAlso": [ + "https://creativecommons.org/licenses/by-sa/3.0/igo/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-SA-4.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-SA-4.0.json", + "referenceNumber": 342, + "name": "Creative Commons Attribution Share Alike 4.0 International", + "licenseId": "CC-BY-SA-4.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-sa/4.0/legalcode" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/CC-PDDC.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-PDDC.json", + "referenceNumber": 240, + "name": "Creative Commons Public Domain Dedication and Certification", + "licenseId": "CC-PDDC", + "seeAlso": [ + "https://creativecommons.org/licenses/publicdomain/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC0-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC0-1.0.json", + "referenceNumber": 279, + "name": "Creative Commons Zero v1.0 Universal", + "licenseId": "CC0-1.0", + "seeAlso": [ + "https://creativecommons.org/publicdomain/zero/1.0/legalcode" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/CDDL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CDDL-1.0.json", + "referenceNumber": 187, + "name": "Common Development and Distribution License 1.0", + "licenseId": "CDDL-1.0", + "seeAlso": [ + "https://opensource.org/licenses/cddl1" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/CDDL-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CDDL-1.1.json", + "referenceNumber": 352, + "name": "Common Development and Distribution License 1.1", + "licenseId": "CDDL-1.1", + "seeAlso": [ + "http://glassfish.java.net/public/CDDL+GPL_1_1.html", + "https://javaee.github.io/glassfish/LICENSE" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CDL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CDL-1.0.json", + "referenceNumber": 12, + "name": "Common Documentation License 1.0", + "licenseId": "CDL-1.0", + "seeAlso": [ + "http://www.opensource.apple.com/cdl/", + "https://fedoraproject.org/wiki/Licensing/Common_Documentation_License", + "https://www.gnu.org/licenses/license-list.html#ACDL" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CDLA-Permissive-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CDLA-Permissive-1.0.json", + "referenceNumber": 238, + "name": "Community Data License Agreement Permissive 1.0", + "licenseId": "CDLA-Permissive-1.0", + "seeAlso": [ + "https://cdla.io/permissive-1-0" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CDLA-Permissive-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CDLA-Permissive-2.0.json", + "referenceNumber": 270, + "name": "Community Data License Agreement Permissive 2.0", + "licenseId": "CDLA-Permissive-2.0", + "seeAlso": [ + "https://cdla.dev/permissive-2-0" + ], + "isOsiApproved": false + }, + { + 
"reference": "https://spdx.org/licenses/CDLA-Sharing-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CDLA-Sharing-1.0.json", + "referenceNumber": 535, + "name": "Community Data License Agreement Sharing 1.0", + "licenseId": "CDLA-Sharing-1.0", + "seeAlso": [ + "https://cdla.io/sharing-1-0" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CECILL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CECILL-1.0.json", + "referenceNumber": 376, + "name": "CeCILL Free Software License Agreement v1.0", + "licenseId": "CECILL-1.0", + "seeAlso": [ + "http://www.cecill.info/licences/Licence_CeCILL_V1-fr.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CECILL-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CECILL-1.1.json", + "referenceNumber": 522, + "name": "CeCILL Free Software License Agreement v1.1", + "licenseId": "CECILL-1.1", + "seeAlso": [ + "http://www.cecill.info/licences/Licence_CeCILL_V1.1-US.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CECILL-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CECILL-2.0.json", + "referenceNumber": 149, + "name": "CeCILL Free Software License Agreement v2.0", + "licenseId": "CECILL-2.0", + "seeAlso": [ + "http://www.cecill.info/licences/Licence_CeCILL_V2-en.html" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/CECILL-2.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CECILL-2.1.json", + "referenceNumber": 226, + "name": "CeCILL Free Software License Agreement v2.1", + "licenseId": "CECILL-2.1", + "seeAlso": [ + "http://www.cecill.info/licences/Licence_CeCILL_V2.1-en.html" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/CECILL-B.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CECILL-B.json", + "referenceNumber": 308, + "name": "CeCILL-B Free Software License Agreement", + "licenseId": "CECILL-B", + "seeAlso": [ + "http://www.cecill.info/licences/Licence_CeCILL-B_V1-en.html" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/CECILL-C.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CECILL-C.json", + "referenceNumber": 129, + "name": "CeCILL-C Free Software License Agreement", + "licenseId": "CECILL-C", + "seeAlso": [ + "http://www.cecill.info/licences/Licence_CeCILL-C_V1-en.html" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/CERN-OHL-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CERN-OHL-1.1.json", + "referenceNumber": 348, + "name": "CERN Open Hardware Licence v1.1", + "licenseId": "CERN-OHL-1.1", + "seeAlso": [ + "https://www.ohwr.org/project/licenses/wikis/cern-ohl-v1.1" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CERN-OHL-1.2.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CERN-OHL-1.2.json", + "referenceNumber": 473, + "name": "CERN Open Hardware Licence v1.2", + "licenseId": "CERN-OHL-1.2", + "seeAlso": [ + "https://www.ohwr.org/project/licenses/wikis/cern-ohl-v1.2" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CERN-OHL-P-2.0.html", + 
"isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CERN-OHL-P-2.0.json", + "referenceNumber": 439, + "name": "CERN Open Hardware Licence Version 2 - Permissive", + "licenseId": "CERN-OHL-P-2.0", + "seeAlso": [ + "https://www.ohwr.org/project/cernohl/wikis/Documents/CERN-OHL-version-2" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/CERN-OHL-S-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CERN-OHL-S-2.0.json", + "referenceNumber": 497, + "name": "CERN Open Hardware Licence Version 2 - Strongly Reciprocal", + "licenseId": "CERN-OHL-S-2.0", + "seeAlso": [ + "https://www.ohwr.org/project/cernohl/wikis/Documents/CERN-OHL-version-2" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/CERN-OHL-W-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CERN-OHL-W-2.0.json", + "referenceNumber": 493, + "name": "CERN Open Hardware Licence Version 2 - Weakly Reciprocal", + "licenseId": "CERN-OHL-W-2.0", + "seeAlso": [ + "https://www.ohwr.org/project/cernohl/wikis/Documents/CERN-OHL-version-2" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/CFITSIO.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CFITSIO.json", + "referenceNumber": 395, + "name": "CFITSIO License", + "licenseId": "CFITSIO", + "seeAlso": [ + "https://heasarc.gsfc.nasa.gov/docs/software/fitsio/c/f_user/node9.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/checkmk.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/checkmk.json", + "referenceNumber": 475, + "name": "Checkmk License", + "licenseId": "checkmk", + "seeAlso": [ + "https://github.com/libcheck/check/blob/master/checkmk/checkmk.in" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/ClArtistic.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/ClArtistic.json", + "referenceNumber": 412, + "name": "Clarified Artistic License", + "licenseId": "ClArtistic", + "seeAlso": [ + "http://gianluca.dellavedova.org/2011/01/03/clarified-artistic-license/", + "http://www.ncftp.com/ncftp/doc/LICENSE.txt" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Clips.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Clips.json", + "referenceNumber": 28, + "name": "Clips License", + "licenseId": "Clips", + "seeAlso": [ + "https://github.com/DrItanium/maya/blob/master/LICENSE.CLIPS" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CMU-Mach.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CMU-Mach.json", + "referenceNumber": 355, + "name": "CMU Mach License", + "licenseId": "CMU-Mach", + "seeAlso": [ + "https://www.cs.cmu.edu/~410/licenses.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CNRI-Jython.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CNRI-Jython.json", + "referenceNumber": 491, + "name": "CNRI Jython License", + "licenseId": "CNRI-Jython", + "seeAlso": [ + "http://www.jython.org/license.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CNRI-Python.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CNRI-Python.json", + "referenceNumber": 120, + 
"name": "CNRI Python License", + "licenseId": "CNRI-Python", + "seeAlso": [ + "https://opensource.org/licenses/CNRI-Python" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/CNRI-Python-GPL-Compatible.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CNRI-Python-GPL-Compatible.json", + "referenceNumber": 404, + "name": "CNRI Python Open Source GPL Compatible License Agreement", + "licenseId": "CNRI-Python-GPL-Compatible", + "seeAlso": [ + "http://www.python.org/download/releases/1.6.1/download_win/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/COIL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/COIL-1.0.json", + "referenceNumber": 203, + "name": "Copyfree Open Innovation License", + "licenseId": "COIL-1.0", + "seeAlso": [ + "https://coil.apotheon.org/plaintext/01.0.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Community-Spec-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Community-Spec-1.0.json", + "referenceNumber": 347, + "name": "Community Specification License 1.0", + "licenseId": "Community-Spec-1.0", + "seeAlso": [ + "https://github.com/CommunitySpecification/1.0/blob/master/1._Community_Specification_License-v1.md" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Condor-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Condor-1.1.json", + "referenceNumber": 351, + "name": "Condor Public License v1.1", + "licenseId": "Condor-1.1", + "seeAlso": [ + "http://research.cs.wisc.edu/condor/license.html#condor", + "http://web.archive.org/web/20111123062036/http://research.cs.wisc.edu/condor/license.html#condor" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/copyleft-next-0.3.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/copyleft-next-0.3.0.json", + "referenceNumber": 258, + "name": "copyleft-next 0.3.0", + "licenseId": "copyleft-next-0.3.0", + "seeAlso": [ + "https://github.com/copyleft-next/copyleft-next/blob/master/Releases/copyleft-next-0.3.0" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/copyleft-next-0.3.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/copyleft-next-0.3.1.json", + "referenceNumber": 265, + "name": "copyleft-next 0.3.1", + "licenseId": "copyleft-next-0.3.1", + "seeAlso": [ + "https://github.com/copyleft-next/copyleft-next/blob/master/Releases/copyleft-next-0.3.1" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Cornell-Lossless-JPEG.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Cornell-Lossless-JPEG.json", + "referenceNumber": 375, + "name": "Cornell Lossless JPEG License", + "licenseId": "Cornell-Lossless-JPEG", + "seeAlso": [ + "https://android.googlesource.com/platform/external/dng_sdk/+/refs/heads/master/source/dng_lossless_jpeg.cpp#16", + "https://www.mssl.ucl.ac.uk/~mcrw/src/20050920/proto.h", + "https://gitlab.freedesktop.org/libopenraw/libopenraw/blob/master/lib/ljpegdecompressor.cpp#L32" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CPAL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CPAL-1.0.json", + "referenceNumber": 411, + "name": "Common Public Attribution 
License 1.0", + "licenseId": "CPAL-1.0", + "seeAlso": [ + "https://opensource.org/licenses/CPAL-1.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/CPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CPL-1.0.json", + "referenceNumber": 488, + "name": "Common Public License 1.0", + "licenseId": "CPL-1.0", + "seeAlso": [ + "https://opensource.org/licenses/CPL-1.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/CPOL-1.02.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CPOL-1.02.json", + "referenceNumber": 381, + "name": "Code Project Open License 1.02", + "licenseId": "CPOL-1.02", + "seeAlso": [ + "http://www.codeproject.com/info/cpol10.aspx" + ], + "isOsiApproved": false, + "isFsfLibre": false + }, + { + "reference": "https://spdx.org/licenses/Crossword.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Crossword.json", + "referenceNumber": 260, + "name": "Crossword License", + "licenseId": "Crossword", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Crossword" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CrystalStacker.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CrystalStacker.json", + "referenceNumber": 105, + "name": "CrystalStacker License", + "licenseId": "CrystalStacker", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing:CrystalStacker?rd\u003dLicensing/CrystalStacker" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CUA-OPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CUA-OPL-1.0.json", + "referenceNumber": 108, + "name": "CUA Office Public License v1.0", + "licenseId": "CUA-OPL-1.0", + "seeAlso": [ + "https://opensource.org/licenses/CUA-OPL-1.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/Cube.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Cube.json", + "referenceNumber": 182, + "name": "Cube License", + "licenseId": "Cube", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Cube" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/curl.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/curl.json", + "referenceNumber": 332, + "name": "curl License", + "licenseId": "curl", + "seeAlso": [ + "https://github.com/bagder/curl/blob/master/COPYING" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/D-FSL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/D-FSL-1.0.json", + "referenceNumber": 337, + "name": "Deutsche Freie Software Lizenz", + "licenseId": "D-FSL-1.0", + "seeAlso": [ + "http://www.dipp.nrw.de/d-fsl/lizenzen/", + "http://www.dipp.nrw.de/d-fsl/index_html/lizenzen/de/D-FSL-1_0_de.txt", + "http://www.dipp.nrw.de/d-fsl/index_html/lizenzen/en/D-FSL-1_0_en.txt", + "https://www.hbz-nrw.de/produkte/open-access/lizenzen/dfsl", + "https://www.hbz-nrw.de/produkte/open-access/lizenzen/dfsl/deutsche-freie-software-lizenz", + "https://www.hbz-nrw.de/produkte/open-access/lizenzen/dfsl/german-free-software-license", + "https://www.hbz-nrw.de/produkte/open-access/lizenzen/dfsl/D-FSL-1_0_de.txt/at_download/file", + "https://www.hbz-nrw.de/produkte/open-access/lizenzen/dfsl/D-FSL-1_0_en.txt/at_download/file" 
+ ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/diffmark.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/diffmark.json", + "referenceNumber": 302, + "name": "diffmark license", + "licenseId": "diffmark", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/diffmark" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/DL-DE-BY-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/DL-DE-BY-2.0.json", + "referenceNumber": 93, + "name": "Data licence Germany – attribution – version 2.0", + "licenseId": "DL-DE-BY-2.0", + "seeAlso": [ + "https://www.govdata.de/dl-de/by-2-0" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/DOC.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/DOC.json", + "referenceNumber": 262, + "name": "DOC License", + "licenseId": "DOC", + "seeAlso": [ + "http://www.cs.wustl.edu/~schmidt/ACE-copying.html", + "https://www.dre.vanderbilt.edu/~schmidt/ACE-copying.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Dotseqn.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Dotseqn.json", + "referenceNumber": 95, + "name": "Dotseqn License", + "licenseId": "Dotseqn", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Dotseqn" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/DRL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/DRL-1.0.json", + "referenceNumber": 325, + "name": "Detection Rule License 1.0", + "licenseId": "DRL-1.0", + "seeAlso": [ + "https://github.com/Neo23x0/sigma/blob/master/LICENSE.Detection.Rules.md" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/DSDP.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/DSDP.json", + "referenceNumber": 379, + "name": "DSDP License", + "licenseId": "DSDP", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/DSDP" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/dtoa.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/dtoa.json", + "referenceNumber": 144, + "name": "David M. Gay dtoa License",
+ "licenseId": "dtoa", + "seeAlso": [ + "https://github.com/SWI-Prolog/swipl-devel/blob/master/src/os/dtoa.c" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/dvipdfm.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/dvipdfm.json", + "referenceNumber": 289, + "name": "dvipdfm License", + "licenseId": "dvipdfm", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/dvipdfm" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/ECL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/ECL-1.0.json", + "referenceNumber": 242, + "name": "Educational Community License v1.0", + "licenseId": "ECL-1.0", + "seeAlso": [ + "https://opensource.org/licenses/ECL-1.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/ECL-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/ECL-2.0.json", + "referenceNumber": 246, + "name": "Educational Community License v2.0", + "licenseId": "ECL-2.0", + "seeAlso": [ + "https://opensource.org/licenses/ECL-2.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/eCos-2.0.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/eCos-2.0.json", + "referenceNumber": 40, + "name": "eCos license version 2.0", + "licenseId": "eCos-2.0", + "seeAlso": [ + "https://www.gnu.org/licenses/ecos-license.html" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/EFL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/EFL-1.0.json", + "referenceNumber": 485, + "name": "Eiffel Forum License v1.0", + "licenseId": "EFL-1.0", + "seeAlso": [ + "http://www.eiffel-nice.org/license/forum.txt", + "https://opensource.org/licenses/EFL-1.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/EFL-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/EFL-2.0.json", + "referenceNumber": 437, + "name": "Eiffel Forum License v2.0", + "licenseId": "EFL-2.0", + "seeAlso": [ + "http://www.eiffel-nice.org/license/eiffel-forum-license-2.html", + "https://opensource.org/licenses/EFL-2.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/eGenix.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/eGenix.json", + "referenceNumber": 170, + "name": "eGenix.com Public License 1.1.0", + "licenseId": "eGenix", + "seeAlso": [ + "http://www.egenix.com/products/eGenix.com-Public-License-1.1.0.pdf", + "https://fedoraproject.org/wiki/Licensing/eGenix.com_Public_License_1.1.0" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Elastic-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Elastic-2.0.json", + "referenceNumber": 547, + "name": "Elastic License 2.0", + "licenseId": "Elastic-2.0", + "seeAlso": [ + "https://www.elastic.co/licensing/elastic-license", + "https://github.com/elastic/elasticsearch/blob/master/licenses/ELASTIC-LICENSE-2.0.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Entessa.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Entessa.json", + "referenceNumber": 89, + "name": "Entessa Public License v1.0", + "licenseId": "Entessa", + "seeAlso": [ +
"https://opensource.org/licenses/Entessa" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/EPICS.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/EPICS.json", + "referenceNumber": 508, + "name": "EPICS Open License", + "licenseId": "EPICS", + "seeAlso": [ + "https://epics.anl.gov/license/open.php" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/EPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/EPL-1.0.json", + "referenceNumber": 388, + "name": "Eclipse Public License 1.0", + "licenseId": "EPL-1.0", + "seeAlso": [ + "http://www.eclipse.org/legal/epl-v10.html", + "https://opensource.org/licenses/EPL-1.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/EPL-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/EPL-2.0.json", + "referenceNumber": 114, + "name": "Eclipse Public License 2.0", + "licenseId": "EPL-2.0", + "seeAlso": [ + "https://www.eclipse.org/legal/epl-2.0", + "https://www.opensource.org/licenses/EPL-2.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/ErlPL-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/ErlPL-1.1.json", + "referenceNumber": 228, + "name": "Erlang Public License v1.1", + "licenseId": "ErlPL-1.1", + "seeAlso": [ + "http://www.erlang.org/EPLICENSE" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/etalab-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/etalab-2.0.json", + "referenceNumber": 273, + "name": "Etalab Open License 2.0", + "licenseId": "etalab-2.0", + "seeAlso": [ + "https://github.com/DISIC/politique-de-contribution-open-source/blob/master/LICENSE.pdf", + "https://raw.githubusercontent.com/DISIC/politique-de-contribution-open-source/master/LICENSE" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/EUDatagrid.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/EUDatagrid.json", + "referenceNumber": 30, + "name": "EU DataGrid Software License", + "licenseId": "EUDatagrid", + "seeAlso": [ + "http://eu-datagrid.web.cern.ch/eu-datagrid/license.html", + "https://opensource.org/licenses/EUDatagrid" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/EUPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/EUPL-1.0.json", + "referenceNumber": 361, + "name": "European Union Public License 1.0", + "licenseId": "EUPL-1.0", + "seeAlso": [ + "http://ec.europa.eu/idabc/en/document/7330.html", + "http://ec.europa.eu/idabc/servlets/Doc027f.pdf?id\u003d31096" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/EUPL-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/EUPL-1.1.json", + "referenceNumber": 109, + "name": "European Union Public License 1.1", + "licenseId": "EUPL-1.1", + "seeAlso": [ + "https://joinup.ec.europa.eu/software/page/eupl/licence-eupl", + "https://joinup.ec.europa.eu/sites/default/files/custom-page/attachment/eupl1.1.-licence-en_0.pdf", + "https://opensource.org/licenses/EUPL-1.1" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/EUPL-1.2.html", + "isDeprecatedLicenseId": false, + "detailsUrl": 
"https://spdx.org/licenses/EUPL-1.2.json", + "referenceNumber": 166, + "name": "European Union Public License 1.2", + "licenseId": "EUPL-1.2", + "seeAlso": [ + "https://joinup.ec.europa.eu/page/eupl-text-11-12", + "https://joinup.ec.europa.eu/sites/default/files/custom-page/attachment/eupl_v1.2_en.pdf", + "https://joinup.ec.europa.eu/sites/default/files/custom-page/attachment/2020-03/EUPL-1.2%20EN.txt", + "https://joinup.ec.europa.eu/sites/default/files/inline-files/EUPL%20v1_2%20EN(1).txt", + "http://eur-lex.europa.eu/legal-content/EN/TXT/HTML/?uri\u003dCELEX:32017D0863", + "https://opensource.org/licenses/EUPL-1.2" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Eurosym.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Eurosym.json", + "referenceNumber": 49, + "name": "Eurosym License", + "licenseId": "Eurosym", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Eurosym" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Fair.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Fair.json", + "referenceNumber": 436, + "name": "Fair License", + "licenseId": "Fair", + "seeAlso": [ + "http://fairlicense.org/", + "https://opensource.org/licenses/Fair" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/FDK-AAC.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/FDK-AAC.json", + "referenceNumber": 159, + "name": "Fraunhofer FDK AAC Codec Library", + "licenseId": "FDK-AAC", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/FDK-AAC", + "https://directory.fsf.org/wiki/License:Fdk" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Frameworx-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Frameworx-1.0.json", + "referenceNumber": 207, + "name": "Frameworx Open License 1.0", + "licenseId": "Frameworx-1.0", + "seeAlso": [ + "https://opensource.org/licenses/Frameworx-1.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/FreeBSD-DOC.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/FreeBSD-DOC.json", + "referenceNumber": 168, + "name": "FreeBSD Documentation License", + "licenseId": "FreeBSD-DOC", + "seeAlso": [ + "https://www.freebsd.org/copyright/freebsd-doc-license/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/FreeImage.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/FreeImage.json", + "referenceNumber": 533, + "name": "FreeImage Public License v1.0", + "licenseId": "FreeImage", + "seeAlso": [ + "http://freeimage.sourceforge.net/freeimage-license.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/FSFAP.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/FSFAP.json", + "referenceNumber": 340, + "name": "FSF All Permissive License", + "licenseId": "FSFAP", + "seeAlso": [ + "https://www.gnu.org/prep/maintain/html_node/License-Notices-for-Other-Files.html" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/FSFUL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/FSFUL.json", + "referenceNumber": 393, + "name": "FSF Unlimited License", + "licenseId": "FSFUL", + "seeAlso": [ + 
"https://fedoraproject.org/wiki/Licensing/FSF_Unlimited_License" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/FSFULLR.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/FSFULLR.json", + "referenceNumber": 528, + "name": "FSF Unlimited License (with License Retention)", + "licenseId": "FSFULLR", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/FSF_Unlimited_License#License_Retention_Variant" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/FSFULLRWD.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/FSFULLRWD.json", + "referenceNumber": 512, + "name": "FSF Unlimited License (With License Retention and Warranty Disclaimer)", + "licenseId": "FSFULLRWD", + "seeAlso": [ + "https://lists.gnu.org/archive/html/autoconf/2012-04/msg00061.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/FTL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/FTL.json", + "referenceNumber": 209, + "name": "Freetype Project License", + "licenseId": "FTL", + "seeAlso": [ + "http://freetype.fis.uniroma2.it/FTL.TXT", + "http://git.savannah.gnu.org/cgit/freetype/freetype2.git/tree/docs/FTL.TXT", + "http://gitlab.freedesktop.org/freetype/freetype/-/raw/master/docs/FTL.TXT" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/GD.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GD.json", + "referenceNumber": 294, + "name": "GD License", + "licenseId": "GD", + "seeAlso": [ + "https://libgd.github.io/manuals/2.3.0/files/license-txt.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.1.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.1.json", + "referenceNumber": 59, + "name": "GNU Free Documentation License v1.1", + "licenseId": "GFDL-1.1", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/fdl-1.1.txt" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.1-invariants-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.1-invariants-only.json", + "referenceNumber": 521, + "name": "GNU Free Documentation License v1.1 only - invariants", + "licenseId": "GFDL-1.1-invariants-only", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/fdl-1.1.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.1-invariants-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.1-invariants-or-later.json", + "referenceNumber": 275, + "name": "GNU Free Documentation License v1.1 or later - invariants", + "licenseId": "GFDL-1.1-invariants-or-later", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/fdl-1.1.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.1-no-invariants-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.1-no-invariants-only.json", + "referenceNumber": 124, + "name": "GNU Free Documentation License v1.1 only - no invariants", + "licenseId": "GFDL-1.1-no-invariants-only", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/fdl-1.1.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.1-no-invariants-or-later.html", + 
"isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.1-no-invariants-or-later.json", + "referenceNumber": 391, + "name": "GNU Free Documentation License v1.1 or later - no invariants", + "licenseId": "GFDL-1.1-no-invariants-or-later", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/fdl-1.1.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.1-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.1-only.json", + "referenceNumber": 11, + "name": "GNU Free Documentation License v1.1 only", + "licenseId": "GFDL-1.1-only", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/fdl-1.1.txt" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.1-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.1-or-later.json", + "referenceNumber": 197, + "name": "GNU Free Documentation License v1.1 or later", + "licenseId": "GFDL-1.1-or-later", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/fdl-1.1.txt" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.2.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.2.json", + "referenceNumber": 188, + "name": "GNU Free Documentation License v1.2", + "licenseId": "GFDL-1.2", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/fdl-1.2.txt" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.2-invariants-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.2-invariants-only.json", + "referenceNumber": 194, + "name": "GNU Free Documentation License v1.2 only - invariants", + "licenseId": "GFDL-1.2-invariants-only", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/fdl-1.2.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.2-invariants-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.2-invariants-or-later.json", + "referenceNumber": 313, + "name": "GNU Free Documentation License v1.2 or later - invariants", + "licenseId": "GFDL-1.2-invariants-or-later", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/fdl-1.2.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.2-no-invariants-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.2-no-invariants-only.json", + "referenceNumber": 427, + "name": "GNU Free Documentation License v1.2 only - no invariants", + "licenseId": "GFDL-1.2-no-invariants-only", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/fdl-1.2.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.2-no-invariants-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.2-no-invariants-or-later.json", + "referenceNumber": 285, + "name": "GNU Free Documentation License v1.2 or later - no invariants", + "licenseId": "GFDL-1.2-no-invariants-or-later", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/fdl-1.2.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.2-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.2-only.json", + "referenceNumber": 
244, + "name": "GNU Free Documentation License v1.2 only", + "licenseId": "GFDL-1.2-only", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/fdl-1.2.txt" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.2-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.2-or-later.json", + "referenceNumber": 349, + "name": "GNU Free Documentation License v1.2 or later", + "licenseId": "GFDL-1.2-or-later", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/fdl-1.2.txt" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.3.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.3.json", + "referenceNumber": 435, + "name": "GNU Free Documentation License v1.3", + "licenseId": "GFDL-1.3", + "seeAlso": [ + "https://www.gnu.org/licenses/fdl-1.3.txt" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.3-invariants-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.3-invariants-only.json", + "referenceNumber": 37, + "name": "GNU Free Documentation License v1.3 only - invariants", + "licenseId": "GFDL-1.3-invariants-only", + "seeAlso": [ + "https://www.gnu.org/licenses/fdl-1.3.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.3-invariants-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.3-invariants-or-later.json", + "referenceNumber": 406, + "name": "GNU Free Documentation License v1.3 or later - invariants", + "licenseId": "GFDL-1.3-invariants-or-later", + "seeAlso": [ + "https://www.gnu.org/licenses/fdl-1.3.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.3-no-invariants-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.3-no-invariants-only.json", + "referenceNumber": 249, + "name": "GNU Free Documentation License v1.3 only - no invariants", + "licenseId": "GFDL-1.3-no-invariants-only", + "seeAlso": [ + "https://www.gnu.org/licenses/fdl-1.3.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.3-no-invariants-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.3-no-invariants-or-later.json", + "referenceNumber": 523, + "name": "GNU Free Documentation License v1.3 or later - no invariants", + "licenseId": "GFDL-1.3-no-invariants-or-later", + "seeAlso": [ + "https://www.gnu.org/licenses/fdl-1.3.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.3-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.3-only.json", + "referenceNumber": 283, + "name": "GNU Free Documentation License v1.3 only", + "licenseId": "GFDL-1.3-only", + "seeAlso": [ + "https://www.gnu.org/licenses/fdl-1.3.txt" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.3-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.3-or-later.json", + "referenceNumber": 336, + "name": "GNU Free Documentation License v1.3 or later", + "licenseId": "GFDL-1.3-or-later", + "seeAlso": [ + "https://www.gnu.org/licenses/fdl-1.3.txt" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + 
"reference": "https://spdx.org/licenses/Giftware.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Giftware.json", + "referenceNumber": 329, + "name": "Giftware License", + "licenseId": "Giftware", + "seeAlso": [ + "http://liballeg.org/license.html#allegro-4-the-giftware-license" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GL2PS.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GL2PS.json", + "referenceNumber": 461, + "name": "GL2PS License", + "licenseId": "GL2PS", + "seeAlso": [ + "http://www.geuz.org/gl2ps/COPYING.GL2PS" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Glide.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Glide.json", + "referenceNumber": 353, + "name": "3dfx Glide License", + "licenseId": "Glide", + "seeAlso": [ + "http://www.users.on.net/~triforce/glidexp/COPYING.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Glulxe.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Glulxe.json", + "referenceNumber": 530, + "name": "Glulxe License", + "licenseId": "Glulxe", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Glulxe" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GLWTPL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GLWTPL.json", + "referenceNumber": 318, + "name": "Good Luck With That Public License", + "licenseId": "GLWTPL", + "seeAlso": [ + "https://github.com/me-shaon/GLWTPL/commit/da5f6bc734095efbacb442c0b31e33a65b9d6e85" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/gnuplot.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/gnuplot.json", + "referenceNumber": 455, + "name": "gnuplot License", + "licenseId": "gnuplot", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Gnuplot" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/GPL-1.0.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GPL-1.0.json", + "referenceNumber": 212, + "name": "GNU General Public License v1.0 only", + "licenseId": "GPL-1.0", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/gpl-1.0-standalone.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GPL-1.0+.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GPL-1.0+.json", + "referenceNumber": 219, + "name": "GNU General Public License v1.0 or later", + "licenseId": "GPL-1.0+", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/gpl-1.0-standalone.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GPL-1.0-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GPL-1.0-only.json", + "referenceNumber": 235, + "name": "GNU General Public License v1.0 only", + "licenseId": "GPL-1.0-only", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/gpl-1.0-standalone.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GPL-1.0-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GPL-1.0-or-later.json", + "referenceNumber": 85, + "name": "GNU General Public License v1.0 or later", + "licenseId": "GPL-1.0-or-later", + "seeAlso": [ + 
"https://www.gnu.org/licenses/old-licenses/gpl-1.0-standalone.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GPL-2.0.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GPL-2.0.json", + "referenceNumber": 1, + "name": "GNU General Public License v2.0 only", + "licenseId": "GPL-2.0", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/gpl-2.0-standalone.html", + "https://opensource.org/licenses/GPL-2.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/GPL-2.0+.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GPL-2.0+.json", + "referenceNumber": 509, + "name": "GNU General Public License v2.0 or later", + "licenseId": "GPL-2.0+", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/gpl-2.0-standalone.html", + "https://opensource.org/licenses/GPL-2.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/GPL-2.0-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GPL-2.0-only.json", + "referenceNumber": 438, + "name": "GNU General Public License v2.0 only", + "licenseId": "GPL-2.0-only", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/gpl-2.0-standalone.html", + "https://opensource.org/licenses/GPL-2.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/GPL-2.0-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GPL-2.0-or-later.json", + "referenceNumber": 17, + "name": "GNU General Public License v2.0 or later", + "licenseId": "GPL-2.0-or-later", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/gpl-2.0-standalone.html", + "https://opensource.org/licenses/GPL-2.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/GPL-2.0-with-autoconf-exception.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GPL-2.0-with-autoconf-exception.json", + "referenceNumber": 296, + "name": "GNU General Public License v2.0 w/Autoconf exception", + "licenseId": "GPL-2.0-with-autoconf-exception", + "seeAlso": [ + "http://ac-archive.sourceforge.net/doc/copyright.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GPL-2.0-with-bison-exception.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GPL-2.0-with-bison-exception.json", + "referenceNumber": 68, + "name": "GNU General Public License v2.0 w/Bison exception", + "licenseId": "GPL-2.0-with-bison-exception", + "seeAlso": [ + "http://git.savannah.gnu.org/cgit/bison.git/tree/data/yacc.c?id\u003d193d7c7054ba7197b0789e14965b739162319b5e#n141" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GPL-2.0-with-classpath-exception.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GPL-2.0-with-classpath-exception.json", + "referenceNumber": 261, + "name": "GNU General Public License v2.0 w/Classpath exception", + "licenseId": "GPL-2.0-with-classpath-exception", + "seeAlso": [ + "https://www.gnu.org/software/classpath/license.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GPL-2.0-with-font-exception.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GPL-2.0-with-font-exception.json", + "referenceNumber": 87, + "name": "GNU General 
Public License v2.0 w/Font exception", + "licenseId": "GPL-2.0-with-font-exception", + "seeAlso": [ + "https://www.gnu.org/licenses/gpl-faq.html#FontException" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GPL-2.0-with-GCC-exception.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GPL-2.0-with-GCC-exception.json", + "referenceNumber": 468, + "name": "GNU General Public License v2.0 w/GCC Runtime Library exception", + "licenseId": "GPL-2.0-with-GCC-exception", + "seeAlso": [ + "https://gcc.gnu.org/git/?p\u003dgcc.git;a\u003dblob;f\u003dgcc/libgcc1.c;h\u003d762f5143fc6eed57b6797c82710f3538aa52b40b;hb\u003dcb143a3ce4fb417c68f5fa2691a1b1b1053dfba9#l10" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GPL-3.0.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GPL-3.0.json", + "referenceNumber": 55, + "name": "GNU General Public License v3.0 only", + "licenseId": "GPL-3.0", + "seeAlso": [ + "https://www.gnu.org/licenses/gpl-3.0-standalone.html", + "https://opensource.org/licenses/GPL-3.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/GPL-3.0+.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GPL-3.0+.json", + "referenceNumber": 146, + "name": "GNU General Public License v3.0 or later", + "licenseId": "GPL-3.0+", + "seeAlso": [ + "https://www.gnu.org/licenses/gpl-3.0-standalone.html", + "https://opensource.org/licenses/GPL-3.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/GPL-3.0-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GPL-3.0-only.json", + "referenceNumber": 174, + "name": "GNU General Public License v3.0 only", + "licenseId": "GPL-3.0-only", + "seeAlso": [ + "https://www.gnu.org/licenses/gpl-3.0-standalone.html", + "https://opensource.org/licenses/GPL-3.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/GPL-3.0-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GPL-3.0-or-later.json", + "referenceNumber": 425, + "name": "GNU General Public License v3.0 or later", + "licenseId": "GPL-3.0-or-later", + "seeAlso": [ + "https://www.gnu.org/licenses/gpl-3.0-standalone.html", + "https://opensource.org/licenses/GPL-3.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/GPL-3.0-with-autoconf-exception.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GPL-3.0-with-autoconf-exception.json", + "referenceNumber": 484, + "name": "GNU General Public License v3.0 w/Autoconf exception", + "licenseId": "GPL-3.0-with-autoconf-exception", + "seeAlso": [ + "https://www.gnu.org/licenses/autoconf-exception-3.0.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GPL-3.0-with-GCC-exception.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GPL-3.0-with-GCC-exception.json", + "referenceNumber": 446, + "name": "GNU General Public License v3.0 w/GCC Runtime Library exception", + "licenseId": "GPL-3.0-with-GCC-exception", + "seeAlso": [ + "https://www.gnu.org/licenses/gcc-exception-3.1.html" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/Graphics-Gems.html", + "isDeprecatedLicenseId": false, + "detailsUrl": 
"https://spdx.org/licenses/Graphics-Gems.json", + "referenceNumber": 315, + "name": "Graphics Gems License", + "licenseId": "Graphics-Gems", + "seeAlso": [ + "https://github.com/erich666/GraphicsGems/blob/master/LICENSE.md" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/gSOAP-1.3b.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/gSOAP-1.3b.json", + "referenceNumber": 556, + "name": "gSOAP Public License v1.3b", + "licenseId": "gSOAP-1.3b", + "seeAlso": [ + "http://www.cs.fsu.edu/~engelen/license.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/HaskellReport.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/HaskellReport.json", + "referenceNumber": 135, + "name": "Haskell Language Report License", + "licenseId": "HaskellReport", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Haskell_Language_Report_License" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Hippocratic-2.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Hippocratic-2.1.json", + "referenceNumber": 5, + "name": "Hippocratic License 2.1", + "licenseId": "Hippocratic-2.1", + "seeAlso": [ + "https://firstdonoharm.dev/version/2/1/license.html", + "https://github.com/EthicalSource/hippocratic-license/blob/58c0e646d64ff6fbee275bfe2b9492f914e3ab2a/LICENSE.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/HP-1986.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/HP-1986.json", + "referenceNumber": 98, + "name": "Hewlett-Packard 1986 License", + "licenseId": "HP-1986", + "seeAlso": [ + "https://sourceware.org/git/?p\u003dnewlib-cygwin.git;a\u003dblob;f\u003dnewlib/libc/machine/hppa/memchr.S;h\u003d1cca3e5e8867aa4bffef1f75a5c1bba25c0c441e;hb\u003dHEAD#l2" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/HPND.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/HPND.json", + "referenceNumber": 172, + "name": "Historical Permission Notice and Disclaimer", + "licenseId": "HPND", + "seeAlso": [ + "https://opensource.org/licenses/HPND" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/HPND-export-US.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/HPND-export-US.json", + "referenceNumber": 272, + "name": "HPND with US Government export control warning", + "licenseId": "HPND-export-US", + "seeAlso": [ + "https://www.kermitproject.org/ck90.html#source" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/HPND-Markus-Kuhn.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/HPND-Markus-Kuhn.json", + "referenceNumber": 118, + "name": "Historical Permission Notice and Disclaimer - Markus Kuhn variant", + "licenseId": "HPND-Markus-Kuhn", + "seeAlso": [ + "https://www.cl.cam.ac.uk/~mgk25/ucs/wcwidth.c", + "https://sourceware.org/git/?p\u003dbinutils-gdb.git;a\u003dblob;f\u003dreadline/readline/support/wcwidth.c;h\u003d0f5ec995796f4813abbcf4972aec0378ab74722a;hb\u003dHEAD#l55" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/HPND-sell-variant.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/HPND-sell-variant.json", + "referenceNumber": 424, + "name": "Historical Permission Notice and Disclaimer 
- sell variant", + "licenseId": "HPND-sell-variant", + "seeAlso": [ + "https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/tree/net/sunrpc/auth_gss/gss_generic_token.c?h\u003dv4.19" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/HPND-sell-variant-MIT-disclaimer.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/HPND-sell-variant-MIT-disclaimer.json", + "referenceNumber": 103, + "name": "HPND sell variant with MIT disclaimer", + "licenseId": "HPND-sell-variant-MIT-disclaimer", + "seeAlso": [ + "https://github.com/sigmavirus24/x11-ssh-askpass/blob/master/README" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/HTMLTIDY.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/HTMLTIDY.json", + "referenceNumber": 538, + "name": "HTML Tidy License", + "licenseId": "HTMLTIDY", + "seeAlso": [ + "https://github.com/htacg/tidy-html5/blob/next/README/LICENSE.md" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/IBM-pibs.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/IBM-pibs.json", + "referenceNumber": 96, + "name": "IBM PowerPC Initialization and Boot Software", + "licenseId": "IBM-pibs", + "seeAlso": [ + "http://git.denx.de/?p\u003du-boot.git;a\u003dblob;f\u003darch/powerpc/cpu/ppc4xx/miiphy.c;h\u003d297155fdafa064b955e53e9832de93bfb0cfb85b;hb\u003d9fab4bf4cc077c21e43941866f3f2c196f28670d" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/ICU.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/ICU.json", + "referenceNumber": 254, + "name": "ICU License", + "licenseId": "ICU", + "seeAlso": [ + "http://source.icu-project.org/repos/icu/icu/trunk/license.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/IEC-Code-Components-EULA.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/IEC-Code-Components-EULA.json", + "referenceNumber": 546, + "name": "IEC Code Components End-user licence agreement", + "licenseId": "IEC-Code-Components-EULA", + "seeAlso": [ + "https://www.iec.ch/webstore/custserv/pdf/CC-EULA.pdf", + "https://www.iec.ch/CCv1", + "https://www.iec.ch/copyright" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/IJG.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/IJG.json", + "referenceNumber": 110, + "name": "Independent JPEG Group License", + "licenseId": "IJG", + "seeAlso": [ + "http://dev.w3.org/cvsweb/Amaya/libjpeg/Attic/README?rev\u003d1.2" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/IJG-short.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/IJG-short.json", + "referenceNumber": 373, + "name": "Independent JPEG Group License - short", + "licenseId": "IJG-short", + "seeAlso": [ + "https://sourceforge.net/p/xmedcon/code/ci/master/tree/libs/ljpg/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/ImageMagick.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/ImageMagick.json", + "referenceNumber": 287, + "name": "ImageMagick License", + "licenseId": "ImageMagick", + "seeAlso": [ + "http://www.imagemagick.org/script/license.php" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/iMatix.html", + 
"isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/iMatix.json", + "referenceNumber": 430, + "name": "iMatix Standard Function Library Agreement", + "licenseId": "iMatix", + "seeAlso": [ + "http://legacy.imatix.com/html/sfl/sfl4.htm#license" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Imlib2.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Imlib2.json", + "referenceNumber": 477, + "name": "Imlib2 License", + "licenseId": "Imlib2", + "seeAlso": [ + "http://trac.enlightenment.org/e/browser/trunk/imlib2/COPYING", + "https://git.enlightenment.org/legacy/imlib2.git/tree/COPYING" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Info-ZIP.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Info-ZIP.json", + "referenceNumber": 366, + "name": "Info-ZIP License", + "licenseId": "Info-ZIP", + "seeAlso": [ + "http://www.info-zip.org/license.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Inner-Net-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Inner-Net-2.0.json", + "referenceNumber": 241, + "name": "Inner Net License v2.0", + "licenseId": "Inner-Net-2.0", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Inner_Net_License", + "https://sourceware.org/git/?p\u003dglibc.git;a\u003dblob;f\u003dLICENSES;h\u003d530893b1dc9ea00755603c68fb36bd4fc38a7be8;hb\u003dHEAD#l207" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Intel.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Intel.json", + "referenceNumber": 486, + "name": "Intel Open Source License", + "licenseId": "Intel", + "seeAlso": [ + "https://opensource.org/licenses/Intel" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Intel-ACPI.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Intel-ACPI.json", + "referenceNumber": 65, + "name": "Intel ACPI Software License Agreement", + "licenseId": "Intel-ACPI", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Intel_ACPI_Software_License_Agreement" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Interbase-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Interbase-1.0.json", + "referenceNumber": 553, + "name": "Interbase Public License v1.0", + "licenseId": "Interbase-1.0", + "seeAlso": [ + "https://web.archive.org/web/20060319014854/http://info.borland.com/devsupport/interbase/opensource/IPL.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/IPA.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/IPA.json", + "referenceNumber": 383, + "name": "IPA Font License", + "licenseId": "IPA", + "seeAlso": [ + "https://opensource.org/licenses/IPA" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/IPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/IPL-1.0.json", + "referenceNumber": 220, + "name": "IBM Public License v1.0", + "licenseId": "IPL-1.0", + "seeAlso": [ + "https://opensource.org/licenses/IPL-1.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/ISC.html", + "isDeprecatedLicenseId": 
false, + "detailsUrl": "https://spdx.org/licenses/ISC.json", + "referenceNumber": 263, + "name": "ISC License", + "licenseId": "ISC", + "seeAlso": [ + "https://www.isc.org/licenses/", + "https://www.isc.org/downloads/software-support-policy/isc-license/", + "https://opensource.org/licenses/ISC" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Jam.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Jam.json", + "referenceNumber": 445, + "name": "Jam License", + "licenseId": "Jam", + "seeAlso": [ + "https://www.boost.org/doc/libs/1_35_0/doc/html/jam.html", + "https://web.archive.org/web/20160330173339/https://swarm.workshop.perforce.com/files/guest/perforce_software/jam/src/README" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/JasPer-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/JasPer-2.0.json", + "referenceNumber": 537, + "name": "JasPer License", + "licenseId": "JasPer-2.0", + "seeAlso": [ + "http://www.ece.uvic.ca/~mdadams/jasper/LICENSE" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/JPL-image.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/JPL-image.json", + "referenceNumber": 81, + "name": "JPL Image Use Policy", + "licenseId": "JPL-image", + "seeAlso": [ + "https://www.jpl.nasa.gov/jpl-image-use-policy" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/JPNIC.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/JPNIC.json", + "referenceNumber": 50, + "name": "Japan Network Information Center License", + "licenseId": "JPNIC", + "seeAlso": [ + "https://gitlab.isc.org/isc-projects/bind9/blob/master/COPYRIGHT#L366" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/JSON.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/JSON.json", + "referenceNumber": 543, + "name": "JSON License", + "licenseId": "JSON", + "seeAlso": [ + "http://www.json.org/license.html" + ], + "isOsiApproved": false, + "isFsfLibre": false + }, + { + "reference": "https://spdx.org/licenses/Kazlib.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Kazlib.json", + "referenceNumber": 229, + "name": "Kazlib License", + "licenseId": "Kazlib", + "seeAlso": [ + "http://git.savannah.gnu.org/cgit/kazlib.git/tree/except.c?id\u003d0062df360c2d17d57f6af19b0e444c51feb99036" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Knuth-CTAN.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Knuth-CTAN.json", + "referenceNumber": 222, + "name": "Knuth CTAN License", + "licenseId": "Knuth-CTAN", + "seeAlso": [ + "https://ctan.org/license/knuth" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/LAL-1.2.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LAL-1.2.json", + "referenceNumber": 176, + "name": "Licence Art Libre 1.2", + "licenseId": "LAL-1.2", + "seeAlso": [ + "http://artlibre.org/licence/lal/licence-art-libre-12/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/LAL-1.3.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LAL-1.3.json", + "referenceNumber": 515, + "name": "Licence Art Libre 1.3", + "licenseId": "LAL-1.3", + "seeAlso": [ + "https://artlibre.org/" + 
], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Latex2e.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Latex2e.json", + "referenceNumber": 303, + "name": "Latex2e License", + "licenseId": "Latex2e", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Latex2e" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Latex2e-translated-notice.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Latex2e-translated-notice.json", + "referenceNumber": 26, + "name": "Latex2e with translated notice permission", + "licenseId": "Latex2e-translated-notice", + "seeAlso": [ + "https://git.savannah.gnu.org/cgit/indent.git/tree/doc/indent.texi?id\u003da74c6b4ee49397cf330b333da1042bffa60ed14f#n74" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Leptonica.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Leptonica.json", + "referenceNumber": 206, + "name": "Leptonica License", + "licenseId": "Leptonica", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Leptonica" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/LGPL-2.0.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/LGPL-2.0.json", + "referenceNumber": 470, + "name": "GNU Library General Public License v2 only", + "licenseId": "LGPL-2.0", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/lgpl-2.0-standalone.html" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/LGPL-2.0+.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/LGPL-2.0+.json", + "referenceNumber": 82, + "name": "GNU Library General Public License v2 or later", + "licenseId": "LGPL-2.0+", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/lgpl-2.0-standalone.html" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/LGPL-2.0-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LGPL-2.0-only.json", + "referenceNumber": 19, + "name": "GNU Library General Public License v2 only", + "licenseId": "LGPL-2.0-only", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/lgpl-2.0-standalone.html" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/LGPL-2.0-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LGPL-2.0-or-later.json", + "referenceNumber": 350, + "name": "GNU Library General Public License v2 or later", + "licenseId": "LGPL-2.0-or-later", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/lgpl-2.0-standalone.html" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/LGPL-2.1.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/LGPL-2.1.json", + "referenceNumber": 554, + "name": "GNU Lesser General Public License v2.1 only", + "licenseId": "LGPL-2.1", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/lgpl-2.1-standalone.html", + "https://opensource.org/licenses/LGPL-2.1" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/LGPL-2.1+.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/LGPL-2.1+.json", + "referenceNumber": 198, + "name": "GNU Lesser General Public License v2.1 or later", + "licenseId": "LGPL-2.1+", + "seeAlso": [ + 
"https://www.gnu.org/licenses/old-licenses/lgpl-2.1-standalone.html", + "https://opensource.org/licenses/LGPL-2.1" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/LGPL-2.1-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LGPL-2.1-only.json", + "referenceNumber": 359, + "name": "GNU Lesser General Public License v2.1 only", + "licenseId": "LGPL-2.1-only", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/lgpl-2.1-standalone.html", + "https://opensource.org/licenses/LGPL-2.1" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/LGPL-2.1-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LGPL-2.1-or-later.json", + "referenceNumber": 66, + "name": "GNU Lesser General Public License v2.1 or later", + "licenseId": "LGPL-2.1-or-later", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/lgpl-2.1-standalone.html", + "https://opensource.org/licenses/LGPL-2.1" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/LGPL-3.0.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/LGPL-3.0.json", + "referenceNumber": 298, + "name": "GNU Lesser General Public License v3.0 only", + "licenseId": "LGPL-3.0", + "seeAlso": [ + "https://www.gnu.org/licenses/lgpl-3.0-standalone.html", + "https://www.gnu.org/licenses/lgpl+gpl-3.0.txt", + "https://opensource.org/licenses/LGPL-3.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/LGPL-3.0+.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/LGPL-3.0+.json", + "referenceNumber": 231, + "name": "GNU Lesser General Public License v3.0 or later", + "licenseId": "LGPL-3.0+", + "seeAlso": [ + "https://www.gnu.org/licenses/lgpl-3.0-standalone.html", + "https://www.gnu.org/licenses/lgpl+gpl-3.0.txt", + "https://opensource.org/licenses/LGPL-3.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/LGPL-3.0-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LGPL-3.0-only.json", + "referenceNumber": 10, + "name": "GNU Lesser General Public License v3.0 only", + "licenseId": "LGPL-3.0-only", + "seeAlso": [ + "https://www.gnu.org/licenses/lgpl-3.0-standalone.html", + "https://www.gnu.org/licenses/lgpl+gpl-3.0.txt", + "https://opensource.org/licenses/LGPL-3.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/LGPL-3.0-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LGPL-3.0-or-later.json", + "referenceNumber": 293, + "name": "GNU Lesser General Public License v3.0 or later", + "licenseId": "LGPL-3.0-or-later", + "seeAlso": [ + "https://www.gnu.org/licenses/lgpl-3.0-standalone.html", + "https://www.gnu.org/licenses/lgpl+gpl-3.0.txt", + "https://opensource.org/licenses/LGPL-3.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/LGPLLR.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LGPLLR.json", + "referenceNumber": 56, + "name": "Lesser General Public License For Linguistic Resources", + "licenseId": "LGPLLR", + "seeAlso": [ + "http://www-igm.univ-mlv.fr/~unitex/lgpllr.html" + ], + "isOsiApproved": false + }, + { + "reference": 
"https://spdx.org/licenses/Libpng.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Libpng.json", + "referenceNumber": 21, + "name": "libpng License", + "licenseId": "Libpng", + "seeAlso": [ + "http://www.libpng.org/pub/png/src/libpng-LICENSE.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/libpng-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/libpng-2.0.json", + "referenceNumber": 453, + "name": "PNG Reference Library version 2", + "licenseId": "libpng-2.0", + "seeAlso": [ + "http://www.libpng.org/pub/png/src/libpng-LICENSE.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/libselinux-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/libselinux-1.0.json", + "referenceNumber": 501, + "name": "libselinux public domain notice", + "licenseId": "libselinux-1.0", + "seeAlso": [ + "https://github.com/SELinuxProject/selinux/blob/master/libselinux/LICENSE" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/libtiff.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/libtiff.json", + "referenceNumber": 227, + "name": "libtiff License", + "licenseId": "libtiff", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/libtiff" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/libutil-David-Nugent.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/libutil-David-Nugent.json", + "referenceNumber": 531, + "name": "libutil David Nugent License", + "licenseId": "libutil-David-Nugent", + "seeAlso": [ + "http://web.mit.edu/freebsd/head/lib/libutil/login_ok.3", + "https://cgit.freedesktop.org/libbsd/tree/man/setproctitle.3bsd" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/LiLiQ-P-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LiLiQ-P-1.1.json", + "referenceNumber": 48, + "name": "Licence Libre du Quรฉbec โ€“ Permissive version 1.1", + "licenseId": "LiLiQ-P-1.1", + "seeAlso": [ + "https://forge.gouv.qc.ca/licence/fr/liliq-v1-1/", + "http://opensource.org/licenses/LiLiQ-P-1.1" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/LiLiQ-R-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LiLiQ-R-1.1.json", + "referenceNumber": 418, + "name": "Licence Libre du Quรฉbec โ€“ Rรฉciprocitรฉ version 1.1", + "licenseId": "LiLiQ-R-1.1", + "seeAlso": [ + "https://www.forge.gouv.qc.ca/participez/licence-logicielle/licence-libre-du-quebec-liliq-en-francais/licence-libre-du-quebec-reciprocite-liliq-r-v1-1/", + "http://opensource.org/licenses/LiLiQ-R-1.1" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/LiLiQ-Rplus-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LiLiQ-Rplus-1.1.json", + "referenceNumber": 286, + "name": "Licence Libre du Quรฉbec โ€“ Rรฉciprocitรฉ forte version 1.1", + "licenseId": "LiLiQ-Rplus-1.1", + "seeAlso": [ + "https://www.forge.gouv.qc.ca/participez/licence-logicielle/licence-libre-du-quebec-liliq-en-francais/licence-libre-du-quebec-reciprocite-forte-liliq-r-v1-1/", + "http://opensource.org/licenses/LiLiQ-Rplus-1.1" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/Linux-man-pages-1-para.html", + "isDeprecatedLicenseId": false, + "detailsUrl": 
"https://spdx.org/licenses/Linux-man-pages-1-para.json", + "referenceNumber": 409, + "name": "Linux man-pages - 1 paragraph", + "licenseId": "Linux-man-pages-1-para", + "seeAlso": [ + "https://git.kernel.org/pub/scm/docs/man-pages/man-pages.git/tree/man2/getcpu.2#n4" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Linux-man-pages-copyleft.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Linux-man-pages-copyleft.json", + "referenceNumber": 469, + "name": "Linux man-pages Copyleft", + "licenseId": "Linux-man-pages-copyleft", + "seeAlso": [ + "https://www.kernel.org/doc/man-pages/licenses.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Linux-man-pages-copyleft-2-para.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Linux-man-pages-copyleft-2-para.json", + "referenceNumber": 167, + "name": "Linux man-pages Copyleft - 2 paragraphs", + "licenseId": "Linux-man-pages-copyleft-2-para", + "seeAlso": [ + "https://git.kernel.org/pub/scm/docs/man-pages/man-pages.git/tree/man2/move_pages.2#n5", + "https://git.kernel.org/pub/scm/docs/man-pages/man-pages.git/tree/man2/migrate_pages.2#n8" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Linux-man-pages-copyleft-var.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Linux-man-pages-copyleft-var.json", + "referenceNumber": 400, + "name": "Linux man-pages Copyleft Variant", + "licenseId": "Linux-man-pages-copyleft-var", + "seeAlso": [ + "https://git.kernel.org/pub/scm/docs/man-pages/man-pages.git/tree/man2/set_mempolicy.2#n5" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Linux-OpenIB.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Linux-OpenIB.json", + "referenceNumber": 25, + "name": "Linux Kernel Variant of OpenIB.org license", + "licenseId": "Linux-OpenIB", + "seeAlso": [ + "https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/tree/drivers/infiniband/core/sa.h" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/LOOP.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LOOP.json", + "referenceNumber": 357, + "name": "Common Lisp LOOP License", + "licenseId": "LOOP", + "seeAlso": [ + "https://gitlab.com/embeddable-common-lisp/ecl/-/blob/develop/src/lsp/loop.lsp", + "http://git.savannah.gnu.org/cgit/gcl.git/tree/gcl/lsp/gcl_loop.lsp?h\u003dVersion_2_6_13pre", + "https://sourceforge.net/p/sbcl/sbcl/ci/master/tree/src/code/loop.lisp", + "https://github.com/cl-adams/adams/blob/master/LICENSE.md", + "https://github.com/blakemcbride/eclipse-lisp/blob/master/lisp/loop.lisp", + "https://gitlab.common-lisp.net/cmucl/cmucl/-/blob/master/src/code/loop.lisp" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/LPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LPL-1.0.json", + "referenceNumber": 102, + "name": "Lucent Public License Version 1.0", + "licenseId": "LPL-1.0", + "seeAlso": [ + "https://opensource.org/licenses/LPL-1.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/LPL-1.02.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LPL-1.02.json", + "referenceNumber": 0, + "name": "Lucent Public License v1.02", + "licenseId": "LPL-1.02", + "seeAlso": [ + 
"http://plan9.bell-labs.com/plan9/license.html", + "https://opensource.org/licenses/LPL-1.02" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/LPPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LPPL-1.0.json", + "referenceNumber": 541, + "name": "LaTeX Project Public License v1.0", + "licenseId": "LPPL-1.0", + "seeAlso": [ + "http://www.latex-project.org/lppl/lppl-1-0.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/LPPL-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LPPL-1.1.json", + "referenceNumber": 99, + "name": "LaTeX Project Public License v1.1", + "licenseId": "LPPL-1.1", + "seeAlso": [ + "http://www.latex-project.org/lppl/lppl-1-1.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/LPPL-1.2.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LPPL-1.2.json", + "referenceNumber": 429, + "name": "LaTeX Project Public License v1.2", + "licenseId": "LPPL-1.2", + "seeAlso": [ + "http://www.latex-project.org/lppl/lppl-1-2.txt" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/LPPL-1.3a.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LPPL-1.3a.json", + "referenceNumber": 516, + "name": "LaTeX Project Public License v1.3a", + "licenseId": "LPPL-1.3a", + "seeAlso": [ + "http://www.latex-project.org/lppl/lppl-1-3a.txt" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/LPPL-1.3c.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LPPL-1.3c.json", + "referenceNumber": 237, + "name": "LaTeX Project Public License v1.3c", + "licenseId": "LPPL-1.3c", + "seeAlso": [ + "http://www.latex-project.org/lppl/lppl-1-3c.txt", + "https://opensource.org/licenses/LPPL-1.3c" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/LZMA-SDK-9.11-to-9.20.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LZMA-SDK-9.11-to-9.20.json", + "referenceNumber": 431, + "name": "LZMA SDK License (versions 9.11 to 9.20)", + "licenseId": "LZMA-SDK-9.11-to-9.20", + "seeAlso": [ + "https://www.7-zip.org/sdk.html", + "https://sourceforge.net/projects/sevenzip/files/LZMA%20SDK/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/LZMA-SDK-9.22.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LZMA-SDK-9.22.json", + "referenceNumber": 449, + "name": "LZMA SDK License (versions 9.22 and beyond)", + "licenseId": "LZMA-SDK-9.22", + "seeAlso": [ + "https://www.7-zip.org/sdk.html", + "https://sourceforge.net/projects/sevenzip/files/LZMA%20SDK/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/MakeIndex.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MakeIndex.json", + "referenceNumber": 123, + "name": "MakeIndex License", + "licenseId": "MakeIndex", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/MakeIndex" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Martin-Birgmeier.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Martin-Birgmeier.json", + "referenceNumber": 380, + "name": "Martin Birgmeier License", + "licenseId": "Martin-Birgmeier", + "seeAlso": [ + 
"https://github.com/Perl/perl5/blob/blead/util.c#L6136" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/metamail.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/metamail.json", + "referenceNumber": 474, + "name": "metamail License", + "licenseId": "metamail", + "seeAlso": [ + "https://github.com/Dual-Life/mime-base64/blob/master/Base64.xs#L12" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Minpack.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Minpack.json", + "referenceNumber": 300, + "name": "Minpack License", + "licenseId": "Minpack", + "seeAlso": [ + "http://www.netlib.org/minpack/disclaimer", + "https://gitlab.com/libeigen/eigen/-/blob/master/COPYING.MINPACK" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/MirOS.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MirOS.json", + "referenceNumber": 443, + "name": "The MirOS Licence", + "licenseId": "MirOS", + "seeAlso": [ + "https://opensource.org/licenses/MirOS" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/MIT.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MIT.json", + "referenceNumber": 223, + "name": "MIT License", + "licenseId": "MIT", + "seeAlso": [ + "https://opensource.org/licenses/MIT" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/MIT-0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MIT-0.json", + "referenceNumber": 369, + "name": "MIT No Attribution", + "licenseId": "MIT-0", + "seeAlso": [ + "https://github.com/aws/mit-0", + "https://romanrm.net/mit-zero", + "https://github.com/awsdocs/aws-cloud9-user-guide/blob/master/LICENSE-SAMPLECODE" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/MIT-advertising.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MIT-advertising.json", + "referenceNumber": 382, + "name": "Enlightenment License (e16)", + "licenseId": "MIT-advertising", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/MIT_With_Advertising" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/MIT-CMU.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MIT-CMU.json", + "referenceNumber": 24, + "name": "CMU License", + "licenseId": "MIT-CMU", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing:MIT?rd\u003dLicensing/MIT#CMU_Style", + "https://github.com/python-pillow/Pillow/blob/fffb426092c8db24a5f4b6df243a8a3c01fb63cd/LICENSE" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/MIT-enna.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MIT-enna.json", + "referenceNumber": 465, + "name": "enna License", + "licenseId": "MIT-enna", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/MIT#enna" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/MIT-feh.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MIT-feh.json", + "referenceNumber": 234, + "name": "feh License", + "licenseId": "MIT-feh", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/MIT#feh" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/MIT-Festival.html", + "isDeprecatedLicenseId": false, + 
"detailsUrl": "https://spdx.org/licenses/MIT-Festival.json", + "referenceNumber": 423, + "name": "MIT Festival Variant", + "licenseId": "MIT-Festival", + "seeAlso": [ + "https://github.com/festvox/flite/blob/master/COPYING", + "https://github.com/festvox/speech_tools/blob/master/COPYING" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/MIT-Modern-Variant.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MIT-Modern-Variant.json", + "referenceNumber": 548, + "name": "MIT License Modern Variant", + "licenseId": "MIT-Modern-Variant", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing:MIT#Modern_Variants", + "https://ptolemy.berkeley.edu/copyright.htm", + "https://pirlwww.lpl.arizona.edu/resources/guide/software/PerlTk/Tixlic.html" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/MIT-open-group.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MIT-open-group.json", + "referenceNumber": 46, + "name": "MIT Open Group variant", + "licenseId": "MIT-open-group", + "seeAlso": [ + "https://gitlab.freedesktop.org/xorg/app/iceauth/-/blob/master/COPYING", + "https://gitlab.freedesktop.org/xorg/app/xvinfo/-/blob/master/COPYING", + "https://gitlab.freedesktop.org/xorg/app/xsetroot/-/blob/master/COPYING", + "https://gitlab.freedesktop.org/xorg/app/xauth/-/blob/master/COPYING" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/MIT-Wu.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MIT-Wu.json", + "referenceNumber": 421, + "name": "MIT Tom Wu Variant", + "licenseId": "MIT-Wu", + "seeAlso": [ + "https://github.com/chromium/octane/blob/master/crypto.js" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/MITNFA.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MITNFA.json", + "referenceNumber": 145, + "name": "MIT +no-false-attribs license", + "licenseId": "MITNFA", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/MITNFA" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Motosoto.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Motosoto.json", + "referenceNumber": 358, + "name": "Motosoto License", + "licenseId": "Motosoto", + "seeAlso": [ + "https://opensource.org/licenses/Motosoto" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/mpi-permissive.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/mpi-permissive.json", + "referenceNumber": 295, + "name": "mpi Permissive License", + "licenseId": "mpi-permissive", + "seeAlso": [ + "https://sources.debian.org/src/openmpi/4.1.0-10/ompi/debuggers/msgq_interface.h/?hl\u003d19#L19" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/mpich2.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/mpich2.json", + "referenceNumber": 281, + "name": "mpich2 License", + "licenseId": "mpich2", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/MIT" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/MPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MPL-1.0.json", + "referenceNumber": 94, + "name": "Mozilla Public License 1.0", + "licenseId": "MPL-1.0", + "seeAlso": [ + "http://www.mozilla.org/MPL/MPL-1.0.html", + 
"https://opensource.org/licenses/MPL-1.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/MPL-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MPL-1.1.json", + "referenceNumber": 192, + "name": "Mozilla Public License 1.1", + "licenseId": "MPL-1.1", + "seeAlso": [ + "http://www.mozilla.org/MPL/MPL-1.1.html", + "https://opensource.org/licenses/MPL-1.1" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/MPL-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MPL-2.0.json", + "referenceNumber": 236, + "name": "Mozilla Public License 2.0", + "licenseId": "MPL-2.0", + "seeAlso": [ + "https://www.mozilla.org/MPL/2.0/", + "https://opensource.org/licenses/MPL-2.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/MPL-2.0-no-copyleft-exception.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MPL-2.0-no-copyleft-exception.json", + "referenceNumber": 67, + "name": "Mozilla Public License 2.0 (no copyleft exception)", + "licenseId": "MPL-2.0-no-copyleft-exception", + "seeAlso": [ + "https://www.mozilla.org/MPL/2.0/", + "https://opensource.org/licenses/MPL-2.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/mplus.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/mplus.json", + "referenceNumber": 157, + "name": "mplus Font License", + "licenseId": "mplus", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing:Mplus?rd\u003dLicensing/mplus" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/MS-LPL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MS-LPL.json", + "referenceNumber": 181, + "name": "Microsoft Limited Public License", + "licenseId": "MS-LPL", + "seeAlso": [ + "https://www.openhub.net/licenses/mslpl", + "https://github.com/gabegundy/atlserver/blob/master/License.txt", + "https://en.wikipedia.org/wiki/Shared_Source_Initiative#Microsoft_Limited_Public_License_(Ms-LPL)" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/MS-PL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MS-PL.json", + "referenceNumber": 345, + "name": "Microsoft Public License", + "licenseId": "MS-PL", + "seeAlso": [ + "http://www.microsoft.com/opensource/licenses.mspx", + "https://opensource.org/licenses/MS-PL" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/MS-RL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MS-RL.json", + "referenceNumber": 23, + "name": "Microsoft Reciprocal License", + "licenseId": "MS-RL", + "seeAlso": [ + "http://www.microsoft.com/opensource/licenses.mspx", + "https://opensource.org/licenses/MS-RL" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/MTLL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MTLL.json", + "referenceNumber": 80, + "name": "Matrix Template Library License", + "licenseId": "MTLL", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Matrix_Template_Library_License" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/MulanPSL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MulanPSL-1.0.json", 
+ "referenceNumber": 290, + "name": "Mulan Permissive Software License, Version 1", + "licenseId": "MulanPSL-1.0", + "seeAlso": [ + "https://license.coscl.org.cn/MulanPSL/", + "https://github.com/yuwenlong/longphp/blob/25dfb70cc2a466dc4bb55ba30901cbce08d164b5/LICENSE" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/MulanPSL-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MulanPSL-2.0.json", + "referenceNumber": 490, + "name": "Mulan Permissive Software License, Version 2", + "licenseId": "MulanPSL-2.0", + "seeAlso": [ + "https://license.coscl.org.cn/MulanPSL2/" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/Multics.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Multics.json", + "referenceNumber": 247, + "name": "Multics License", + "licenseId": "Multics", + "seeAlso": [ + "https://opensource.org/licenses/Multics" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/Mup.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Mup.json", + "referenceNumber": 480, + "name": "Mup License", + "licenseId": "Mup", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Mup" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/NAIST-2003.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NAIST-2003.json", + "referenceNumber": 39, + "name": "Nara Institute of Science and Technology License (2003)", + "licenseId": "NAIST-2003", + "seeAlso": [ + "https://enterprise.dejacode.com/licenses/public/naist-2003/#license-text", + "https://github.com/nodejs/node/blob/4a19cc8947b1bba2b2d27816ec3d0edf9b28e503/LICENSE#L343" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/NASA-1.3.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NASA-1.3.json", + "referenceNumber": 360, + "name": "NASA Open Source Agreement 1.3", + "licenseId": "NASA-1.3", + "seeAlso": [ + "http://ti.arc.nasa.gov/opensource/nosa/", + "https://opensource.org/licenses/NASA-1.3" + ], + "isOsiApproved": true, + "isFsfLibre": false + }, + { + "reference": "https://spdx.org/licenses/Naumen.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Naumen.json", + "referenceNumber": 339, + "name": "Naumen Public License", + "licenseId": "Naumen", + "seeAlso": [ + "https://opensource.org/licenses/Naumen" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/NBPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NBPL-1.0.json", + "referenceNumber": 517, + "name": "Net Boolean Public License v1", + "licenseId": "NBPL-1.0", + "seeAlso": [ + "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003d37b4b3f6cc4bf34e1d3dec61e69914b9819d8894" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/NCGL-UK-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NCGL-UK-2.0.json", + "referenceNumber": 113, + "name": "Non-Commercial Government Licence", + "licenseId": "NCGL-UK-2.0", + "seeAlso": [ + "http://www.nationalarchives.gov.uk/doc/non-commercial-government-licence/version/2/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/NCSA.html", + "isDeprecatedLicenseId": false, + "detailsUrl": 
"https://spdx.org/licenses/NCSA.json", + "referenceNumber": 199, + "name": "University of Illinois/NCSA Open Source License", + "licenseId": "NCSA", + "seeAlso": [ + "http://otm.illinois.edu/uiuc_openSource", + "https://opensource.org/licenses/NCSA" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Net-SNMP.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Net-SNMP.json", + "referenceNumber": 74, + "name": "Net-SNMP License", + "licenseId": "Net-SNMP", + "seeAlso": [ + "http://net-snmp.sourceforge.net/about/license.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/NetCDF.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NetCDF.json", + "referenceNumber": 321, + "name": "NetCDF license", + "licenseId": "NetCDF", + "seeAlso": [ + "http://www.unidata.ucar.edu/software/netcdf/copyright.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Newsletr.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Newsletr.json", + "referenceNumber": 539, + "name": "Newsletr License", + "licenseId": "Newsletr", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Newsletr" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/NGPL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NGPL.json", + "referenceNumber": 301, + "name": "Nethack General Public License", + "licenseId": "NGPL", + "seeAlso": [ + "https://opensource.org/licenses/NGPL" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/NICTA-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NICTA-1.0.json", + "referenceNumber": 545, + "name": "NICTA Public Software License, Version 1.0", + "licenseId": "NICTA-1.0", + "seeAlso": [ + "https://opensource.apple.com/source/mDNSResponder/mDNSResponder-320.10/mDNSPosix/nss_ReadMe.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/NIST-PD.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NIST-PD.json", + "referenceNumber": 346, + "name": "NIST Public Domain Notice", + "licenseId": "NIST-PD", + "seeAlso": [ + "https://github.com/tcheneau/simpleRPL/blob/e645e69e38dd4e3ccfeceb2db8cba05b7c2e0cd3/LICENSE.txt", + "https://github.com/tcheneau/Routing/blob/f09f46fcfe636107f22f2c98348188a65a135d98/README.md" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/NIST-PD-fallback.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NIST-PD-fallback.json", + "referenceNumber": 319, + "name": "NIST Public Domain Notice with license fallback", + "licenseId": "NIST-PD-fallback", + "seeAlso": [ + "https://github.com/usnistgov/jsip/blob/59700e6926cbe96c5cdae897d9a7d2656b42abe3/LICENSE", + "https://github.com/usnistgov/fipy/blob/86aaa5c2ba2c6f1be19593c5986071cf6568cc34/LICENSE.rst" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/NIST-Software.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NIST-Software.json", + "referenceNumber": 413, + "name": "NIST Software License", + "licenseId": "NIST-Software", + "seeAlso": [ + "https://github.com/open-quantum-safe/liboqs/blob/40b01fdbb270f8614fde30e65d30e9da18c02393/src/common/rand/rand_nist.c#L1-L15" + ], + "isOsiApproved": false + }, + { + 
"reference": "https://spdx.org/licenses/NLOD-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NLOD-1.0.json", + "referenceNumber": 525, + "name": "Norwegian Licence for Open Government Data (NLOD) 1.0", + "licenseId": "NLOD-1.0", + "seeAlso": [ + "http://data.norge.no/nlod/en/1.0" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/NLOD-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NLOD-2.0.json", + "referenceNumber": 52, + "name": "Norwegian Licence for Open Government Data (NLOD) 2.0", + "licenseId": "NLOD-2.0", + "seeAlso": [ + "http://data.norge.no/nlod/en/2.0" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/NLPL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NLPL.json", + "referenceNumber": 529, + "name": "No Limit Public License", + "licenseId": "NLPL", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/NLPL" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Nokia.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Nokia.json", + "referenceNumber": 88, + "name": "Nokia Open Source License", + "licenseId": "Nokia", + "seeAlso": [ + "https://opensource.org/licenses/nokia" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/NOSL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NOSL.json", + "referenceNumber": 417, + "name": "Netizen Open Source License", + "licenseId": "NOSL", + "seeAlso": [ + "http://bits.netizen.com.au/licenses/NOSL/nosl.txt" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Noweb.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Noweb.json", + "referenceNumber": 398, + "name": "Noweb License", + "licenseId": "Noweb", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Noweb" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/NPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NPL-1.0.json", + "referenceNumber": 53, + "name": "Netscape Public License v1.0", + "licenseId": "NPL-1.0", + "seeAlso": [ + "http://www.mozilla.org/MPL/NPL/1.0/" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/NPL-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NPL-1.1.json", + "referenceNumber": 51, + "name": "Netscape Public License v1.1", + "licenseId": "NPL-1.1", + "seeAlso": [ + "http://www.mozilla.org/MPL/NPL/1.1/" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/NPOSL-3.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NPOSL-3.0.json", + "referenceNumber": 555, + "name": "Non-Profit Open Software License 3.0", + "licenseId": "NPOSL-3.0", + "seeAlso": [ + "https://opensource.org/licenses/NOSL3.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/NRL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NRL.json", + "referenceNumber": 458, + "name": "NRL License", + "licenseId": "NRL", + "seeAlso": [ + "http://web.mit.edu/network/isakmp/nrllicense.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/NTP.html", + 
"isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NTP.json", + "referenceNumber": 2, + "name": "NTP License", + "licenseId": "NTP", + "seeAlso": [ + "https://opensource.org/licenses/NTP" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/NTP-0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NTP-0.json", + "referenceNumber": 476, + "name": "NTP No Attribution", + "licenseId": "NTP-0", + "seeAlso": [ + "https://github.com/tytso/e2fsprogs/blob/master/lib/et/et_name.c" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Nunit.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/Nunit.json", + "referenceNumber": 456, + "name": "Nunit License", + "licenseId": "Nunit", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Nunit" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/O-UDA-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/O-UDA-1.0.json", + "referenceNumber": 542, + "name": "Open Use of Data Agreement v1.0", + "licenseId": "O-UDA-1.0", + "seeAlso": [ + "https://github.com/microsoft/Open-Use-of-Data-Agreement/blob/v1.0/O-UDA-1.0.md", + "https://cdla.dev/open-use-of-data-agreement-v1-0/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OCCT-PL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OCCT-PL.json", + "referenceNumber": 309, + "name": "Open CASCADE Technology Public License", + "licenseId": "OCCT-PL", + "seeAlso": [ + "http://www.opencascade.com/content/occt-public-license" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OCLC-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OCLC-2.0.json", + "referenceNumber": 370, + "name": "OCLC Research Public License 2.0", + "licenseId": "OCLC-2.0", + "seeAlso": [ + "http://www.oclc.org/research/activities/software/license/v2final.htm", + "https://opensource.org/licenses/OCLC-2.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/ODbL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/ODbL-1.0.json", + "referenceNumber": 356, + "name": "Open Data Commons Open Database License v1.0", + "licenseId": "ODbL-1.0", + "seeAlso": [ + "http://www.opendatacommons.org/licenses/odbl/1.0/", + "https://opendatacommons.org/licenses/odbl/1-0/" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/ODC-By-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/ODC-By-1.0.json", + "referenceNumber": 64, + "name": "Open Data Commons Attribution License v1.0", + "licenseId": "ODC-By-1.0", + "seeAlso": [ + "https://opendatacommons.org/licenses/by/1.0/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OFFIS.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OFFIS.json", + "referenceNumber": 104, + "name": "OFFIS License", + "licenseId": "OFFIS", + "seeAlso": [ + "https://sourceforge.net/p/xmedcon/code/ci/master/tree/libs/dicom/README" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OFL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OFL-1.0.json", + "referenceNumber": 419, + "name": "SIL Open Font License 
1.0", + "licenseId": "OFL-1.0", + "seeAlso": [ + "http://scripts.sil.org/cms/scripts/page.php?item_id\u003dOFL10_web" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/OFL-1.0-no-RFN.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OFL-1.0-no-RFN.json", + "referenceNumber": 354, + "name": "SIL Open Font License 1.0 with no Reserved Font Name", + "licenseId": "OFL-1.0-no-RFN", + "seeAlso": [ + "http://scripts.sil.org/cms/scripts/page.php?item_id\u003dOFL10_web" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OFL-1.0-RFN.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OFL-1.0-RFN.json", + "referenceNumber": 250, + "name": "SIL Open Font License 1.0 with Reserved Font Name", + "licenseId": "OFL-1.0-RFN", + "seeAlso": [ + "http://scripts.sil.org/cms/scripts/page.php?item_id\u003dOFL10_web" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OFL-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OFL-1.1.json", + "referenceNumber": 3, + "name": "SIL Open Font License 1.1", + "licenseId": "OFL-1.1", + "seeAlso": [ + "http://scripts.sil.org/cms/scripts/page.php?item_id\u003dOFL_web", + "https://opensource.org/licenses/OFL-1.1" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/OFL-1.1-no-RFN.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OFL-1.1-no-RFN.json", + "referenceNumber": 117, + "name": "SIL Open Font License 1.1 with no Reserved Font Name", + "licenseId": "OFL-1.1-no-RFN", + "seeAlso": [ + "http://scripts.sil.org/cms/scripts/page.php?item_id\u003dOFL_web", + "https://opensource.org/licenses/OFL-1.1" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/OFL-1.1-RFN.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OFL-1.1-RFN.json", + "referenceNumber": 518, + "name": "SIL Open Font License 1.1 with Reserved Font Name", + "licenseId": "OFL-1.1-RFN", + "seeAlso": [ + "http://scripts.sil.org/cms/scripts/page.php?item_id\u003dOFL_web", + "https://opensource.org/licenses/OFL-1.1" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/OGC-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OGC-1.0.json", + "referenceNumber": 15, + "name": "OGC Software License, Version 1.0", + "licenseId": "OGC-1.0", + "seeAlso": [ + "https://www.ogc.org/ogc/software/1.0" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OGDL-Taiwan-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OGDL-Taiwan-1.0.json", + "referenceNumber": 284, + "name": "Taiwan Open Government Data License, version 1.0", + "licenseId": "OGDL-Taiwan-1.0", + "seeAlso": [ + "https://data.gov.tw/license" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OGL-Canada-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OGL-Canada-2.0.json", + "referenceNumber": 214, + "name": "Open Government Licence - Canada", + "licenseId": "OGL-Canada-2.0", + "seeAlso": [ + "https://open.canada.ca/en/open-government-licence-canada" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OGL-UK-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": 
"https://spdx.org/licenses/OGL-UK-1.0.json", + "referenceNumber": 165, + "name": "Open Government Licence v1.0", + "licenseId": "OGL-UK-1.0", + "seeAlso": [ + "http://www.nationalarchives.gov.uk/doc/open-government-licence/version/1/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OGL-UK-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OGL-UK-2.0.json", + "referenceNumber": 304, + "name": "Open Government Licence v2.0", + "licenseId": "OGL-UK-2.0", + "seeAlso": [ + "http://www.nationalarchives.gov.uk/doc/open-government-licence/version/2/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OGL-UK-3.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OGL-UK-3.0.json", + "referenceNumber": 415, + "name": "Open Government Licence v3.0", + "licenseId": "OGL-UK-3.0", + "seeAlso": [ + "http://www.nationalarchives.gov.uk/doc/open-government-licence/version/3/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OGTSL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OGTSL.json", + "referenceNumber": 133, + "name": "Open Group Test Suite License", + "licenseId": "OGTSL", + "seeAlso": [ + "http://www.opengroup.org/testing/downloads/The_Open_Group_TSL.txt", + "https://opensource.org/licenses/OGTSL" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/OLDAP-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLDAP-1.1.json", + "referenceNumber": 208, + "name": "Open LDAP Public License v1.1", + "licenseId": "OLDAP-1.1", + "seeAlso": [ + "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003d806557a5ad59804ef3a44d5abfbe91d706b0791f" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OLDAP-1.2.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLDAP-1.2.json", + "referenceNumber": 100, + "name": "Open LDAP Public License v1.2", + "licenseId": "OLDAP-1.2", + "seeAlso": [ + "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003d42b0383c50c299977b5893ee695cf4e486fb0dc7" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OLDAP-1.3.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLDAP-1.3.json", + "referenceNumber": 328, + "name": "Open LDAP Public License v1.3", + "licenseId": "OLDAP-1.3", + "seeAlso": [ + "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003de5f8117f0ce088d0bd7a8e18ddf37eaa40eb09b1" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OLDAP-1.4.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLDAP-1.4.json", + "referenceNumber": 333, + "name": "Open LDAP Public License v1.4", + "licenseId": "OLDAP-1.4", + "seeAlso": [ + "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003dc9f95c2f3f2ffb5e0ae55fe7388af75547660941" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OLDAP-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLDAP-2.0.json", + "referenceNumber": 519, + "name": "Open LDAP Public License v2.0 (or possibly 2.0A and 2.0B)", + "licenseId": "OLDAP-2.0", + "seeAlso": [ + 
"http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003dcbf50f4e1185a21abd4c0a54d3f4341fe28f36ea" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OLDAP-2.0.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLDAP-2.0.1.json", + "referenceNumber": 324, + "name": "Open LDAP Public License v2.0.1", + "licenseId": "OLDAP-2.0.1", + "seeAlso": [ + "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003db6d68acd14e51ca3aab4428bf26522aa74873f0e" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OLDAP-2.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLDAP-2.1.json", + "referenceNumber": 402, + "name": "Open LDAP Public License v2.1", + "licenseId": "OLDAP-2.1", + "seeAlso": [ + "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003db0d176738e96a0d3b9f85cb51e140a86f21be715" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OLDAP-2.2.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLDAP-2.2.json", + "referenceNumber": 163, + "name": "Open LDAP Public License v2.2", + "licenseId": "OLDAP-2.2", + "seeAlso": [ + "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003d470b0c18ec67621c85881b2733057fecf4a1acc3" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OLDAP-2.2.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLDAP-2.2.1.json", + "referenceNumber": 451, + "name": "Open LDAP Public License v2.2.1", + "licenseId": "OLDAP-2.2.1", + "seeAlso": [ + "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003d4bc786f34b50aa301be6f5600f58a980070f481e" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OLDAP-2.2.2.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLDAP-2.2.2.json", + "referenceNumber": 140, + "name": "Open LDAP Public License 2.2.2", + "licenseId": "OLDAP-2.2.2", + "seeAlso": [ + "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003ddf2cc1e21eb7c160695f5b7cffd6296c151ba188" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OLDAP-2.3.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLDAP-2.3.json", + "referenceNumber": 33, + "name": "Open LDAP Public License v2.3", + "licenseId": "OLDAP-2.3", + "seeAlso": [ + "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003dd32cf54a32d581ab475d23c810b0a7fbaf8d63c3" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/OLDAP-2.4.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLDAP-2.4.json", + "referenceNumber": 447, + "name": "Open LDAP Public License v2.4", + "licenseId": "OLDAP-2.4", + "seeAlso": [ + "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003dcd1284c4a91a8a380d904eee68d1583f989ed386" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OLDAP-2.5.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLDAP-2.5.json", + "referenceNumber": 549, + "name": "Open LDAP Public License 
v2.5", + "licenseId": "OLDAP-2.5", + "seeAlso": [ + "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003d6852b9d90022e8593c98205413380536b1b5a7cf" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OLDAP-2.6.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLDAP-2.6.json", + "referenceNumber": 297, + "name": "Open LDAP Public License v2.6", + "licenseId": "OLDAP-2.6", + "seeAlso": [ + "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003d1cae062821881f41b73012ba816434897abf4205" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OLDAP-2.7.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLDAP-2.7.json", + "referenceNumber": 134, + "name": "Open LDAP Public License v2.7", + "licenseId": "OLDAP-2.7", + "seeAlso": [ + "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003d47c2415c1df81556eeb39be6cad458ef87c534a2" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/OLDAP-2.8.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLDAP-2.8.json", + "referenceNumber": 540, + "name": "Open LDAP Public License v2.8", + "licenseId": "OLDAP-2.8", + "seeAlso": [ + "http://www.openldap.org/software/release/license.html" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/OLFL-1.3.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLFL-1.3.json", + "referenceNumber": 482, + "name": "Open Logistics Foundation License Version 1.3", + "licenseId": "OLFL-1.3", + "seeAlso": [ + "https://openlogisticsfoundation.org/licenses/", + "https://opensource.org/license/olfl-1-3/" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/OML.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OML.json", + "referenceNumber": 155, + "name": "Open Market License", + "licenseId": "OML", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Open_Market_License" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OpenPBS-2.3.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OpenPBS-2.3.json", + "referenceNumber": 377, + "name": "OpenPBS v2.3 Software License", + "licenseId": "OpenPBS-2.3", + "seeAlso": [ + "https://github.com/adaptivecomputing/torque/blob/master/PBS_License.txt", + "https://www.mcs.anl.gov/research/projects/openpbs/PBS_License.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OpenSSL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OpenSSL.json", + "referenceNumber": 276, + "name": "OpenSSL License", + "licenseId": "OpenSSL", + "seeAlso": [ + "http://www.openssl.org/source/license.html" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/OPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OPL-1.0.json", + "referenceNumber": 510, + "name": "Open Public License v1.0", + "licenseId": "OPL-1.0", + "seeAlso": [ + "http://old.koalateam.com/jackaroo/OPL_1_0.TXT", + "https://fedoraproject.org/wiki/Licensing/Open_Public_License" + ], + "isOsiApproved": false, + "isFsfLibre": false + }, + { + "reference": 
"https://spdx.org/licenses/OPL-UK-3.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OPL-UK-3.0.json", + "referenceNumber": 257, + "name": "United Kingdom Open Parliament Licence v3.0", + "licenseId": "OPL-UK-3.0", + "seeAlso": [ + "https://www.parliament.uk/site-information/copyright-parliament/open-parliament-licence/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OPUBL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OPUBL-1.0.json", + "referenceNumber": 514, + "name": "Open Publication License v1.0", + "licenseId": "OPUBL-1.0", + "seeAlso": [ + "http://opencontent.org/openpub/", + "https://www.debian.org/opl", + "https://www.ctan.org/license/opl" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OSET-PL-2.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OSET-PL-2.1.json", + "referenceNumber": 274, + "name": "OSET Public License version 2.1", + "licenseId": "OSET-PL-2.1", + "seeAlso": [ + "http://www.osetfoundation.org/public-license", + "https://opensource.org/licenses/OPL-2.1" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/OSL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OSL-1.0.json", + "referenceNumber": 371, + "name": "Open Software License 1.0", + "licenseId": "OSL-1.0", + "seeAlso": [ + "https://opensource.org/licenses/OSL-1.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/OSL-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OSL-1.1.json", + "referenceNumber": 310, + "name": "Open Software License 1.1", + "licenseId": "OSL-1.1", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/OSL1.1" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/OSL-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OSL-2.0.json", + "referenceNumber": 405, + "name": "Open Software License 2.0", + "licenseId": "OSL-2.0", + "seeAlso": [ + "http://web.archive.org/web/20041020171434/http://www.rosenlaw.com/osl2.0.html" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/OSL-2.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OSL-2.1.json", + "referenceNumber": 251, + "name": "Open Software License 2.1", + "licenseId": "OSL-2.1", + "seeAlso": [ + "http://web.archive.org/web/20050212003940/http://www.rosenlaw.com/osl21.htm", + "https://opensource.org/licenses/OSL-2.1" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/OSL-3.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OSL-3.0.json", + "referenceNumber": 20, + "name": "Open Software License 3.0", + "licenseId": "OSL-3.0", + "seeAlso": [ + "https://web.archive.org/web/20120101081418/http://rosenlaw.com:80/OSL3.0.htm", + "https://opensource.org/licenses/OSL-3.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Parity-6.0.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Parity-6.0.0.json", + "referenceNumber": 69, + "name": "The Parity Public License 6.0.0", + "licenseId": "Parity-6.0.0", + "seeAlso": [ + "https://paritylicense.com/versions/6.0.0.html" + ], + 
"isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Parity-7.0.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Parity-7.0.0.json", + "referenceNumber": 323, + "name": "The Parity Public License 7.0.0", + "licenseId": "Parity-7.0.0", + "seeAlso": [ + "https://paritylicense.com/versions/7.0.0.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/PDDL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/PDDL-1.0.json", + "referenceNumber": 42, + "name": "Open Data Commons Public Domain Dedication \u0026 License 1.0", + "licenseId": "PDDL-1.0", + "seeAlso": [ + "http://opendatacommons.org/licenses/pddl/1.0/", + "https://opendatacommons.org/licenses/pddl/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/PHP-3.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/PHP-3.0.json", + "referenceNumber": 450, + "name": "PHP License v3.0", + "licenseId": "PHP-3.0", + "seeAlso": [ + "http://www.php.net/license/3_0.txt", + "https://opensource.org/licenses/PHP-3.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/PHP-3.01.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/PHP-3.01.json", + "referenceNumber": 58, + "name": "PHP License v3.01", + "licenseId": "PHP-3.01", + "seeAlso": [ + "http://www.php.net/license/3_01.txt" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Plexus.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Plexus.json", + "referenceNumber": 97, + "name": "Plexus Classworlds License", + "licenseId": "Plexus", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Plexus_Classworlds_License" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/PolyForm-Noncommercial-1.0.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/PolyForm-Noncommercial-1.0.0.json", + "referenceNumber": 112, + "name": "PolyForm Noncommercial License 1.0.0", + "licenseId": "PolyForm-Noncommercial-1.0.0", + "seeAlso": [ + "https://polyformproject.org/licenses/noncommercial/1.0.0" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/PolyForm-Small-Business-1.0.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/PolyForm-Small-Business-1.0.0.json", + "referenceNumber": 161, + "name": "PolyForm Small Business License 1.0.0", + "licenseId": "PolyForm-Small-Business-1.0.0", + "seeAlso": [ + "https://polyformproject.org/licenses/small-business/1.0.0" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/PostgreSQL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/PostgreSQL.json", + "referenceNumber": 527, + "name": "PostgreSQL License", + "licenseId": "PostgreSQL", + "seeAlso": [ + "http://www.postgresql.org/about/licence", + "https://opensource.org/licenses/PostgreSQL" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/PSF-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/PSF-2.0.json", + "referenceNumber": 86, + "name": "Python Software Foundation License 2.0", + "licenseId": "PSF-2.0", + "seeAlso": [ + "https://opensource.org/licenses/Python-2.0" + ], + "isOsiApproved": false + }, + { + "reference": 
"https://spdx.org/licenses/psfrag.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/psfrag.json", + "referenceNumber": 190, + "name": "psfrag License", + "licenseId": "psfrag", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/psfrag" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/psutils.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/psutils.json", + "referenceNumber": 27, + "name": "psutils License", + "licenseId": "psutils", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/psutils" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Python-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Python-2.0.json", + "referenceNumber": 459, + "name": "Python License 2.0", + "licenseId": "Python-2.0", + "seeAlso": [ + "https://opensource.org/licenses/Python-2.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Python-2.0.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Python-2.0.1.json", + "referenceNumber": 307, + "name": "Python License 2.0.1", + "licenseId": "Python-2.0.1", + "seeAlso": [ + "https://www.python.org/download/releases/2.0.1/license/", + "https://docs.python.org/3/license.html", + "https://github.com/python/cpython/blob/main/LICENSE" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Qhull.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Qhull.json", + "referenceNumber": 158, + "name": "Qhull License", + "licenseId": "Qhull", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Qhull" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/QPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/QPL-1.0.json", + "referenceNumber": 472, + "name": "Q Public License 1.0", + "licenseId": "QPL-1.0", + "seeAlso": [ + "http://doc.qt.nokia.com/3.3/license.html", + "https://opensource.org/licenses/QPL-1.0", + "https://doc.qt.io/archives/3.3/license.html" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/QPL-1.0-INRIA-2004.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/QPL-1.0-INRIA-2004.json", + "referenceNumber": 62, + "name": "Q Public License 1.0 - INRIA 2004 variant", + "licenseId": "QPL-1.0-INRIA-2004", + "seeAlso": [ + "https://github.com/maranget/hevea/blob/master/LICENSE" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Rdisc.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Rdisc.json", + "referenceNumber": 224, + "name": "Rdisc License", + "licenseId": "Rdisc", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Rdisc_License" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/RHeCos-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/RHeCos-1.1.json", + "referenceNumber": 422, + "name": "Red Hat eCos Public License v1.1", + "licenseId": "RHeCos-1.1", + "seeAlso": [ + "http://ecos.sourceware.org/old-license.html" + ], + "isOsiApproved": false, + "isFsfLibre": false + }, + { + "reference": "https://spdx.org/licenses/RPL-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/RPL-1.1.json", + "referenceNumber": 16, + 
"name": "Reciprocal Public License 1.1", + "licenseId": "RPL-1.1", + "seeAlso": [ + "https://opensource.org/licenses/RPL-1.1" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/RPL-1.5.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/RPL-1.5.json", + "referenceNumber": 136, + "name": "Reciprocal Public License 1.5", + "licenseId": "RPL-1.5", + "seeAlso": [ + "https://opensource.org/licenses/RPL-1.5" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/RPSL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/RPSL-1.0.json", + "referenceNumber": 230, + "name": "RealNetworks Public Source License v1.0", + "licenseId": "RPSL-1.0", + "seeAlso": [ + "https://helixcommunity.org/content/rpsl", + "https://opensource.org/licenses/RPSL-1.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/RSA-MD.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/RSA-MD.json", + "referenceNumber": 506, + "name": "RSA Message-Digest License", + "licenseId": "RSA-MD", + "seeAlso": [ + "http://www.faqs.org/rfcs/rfc1321.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/RSCPL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/RSCPL.json", + "referenceNumber": 169, + "name": "Ricoh Source Code Public License", + "licenseId": "RSCPL", + "seeAlso": [ + "http://wayback.archive.org/web/20060715140826/http://www.risource.org/RPL/RPL-1.0A.shtml", + "https://opensource.org/licenses/RSCPL" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/Ruby.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Ruby.json", + "referenceNumber": 60, + "name": "Ruby License", + "licenseId": "Ruby", + "seeAlso": [ + "http://www.ruby-lang.org/en/LICENSE.txt" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/SAX-PD.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SAX-PD.json", + "referenceNumber": 390, + "name": "Sax Public Domain Notice", + "licenseId": "SAX-PD", + "seeAlso": [ + "http://www.saxproject.org/copying.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Saxpath.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Saxpath.json", + "referenceNumber": 372, + "name": "Saxpath License", + "licenseId": "Saxpath", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Saxpath_License" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/SCEA.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SCEA.json", + "referenceNumber": 173, + "name": "SCEA Shared Source License", + "licenseId": "SCEA", + "seeAlso": [ + "http://research.scea.com/scea_shared_source_license.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/SchemeReport.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SchemeReport.json", + "referenceNumber": 38, + "name": "Scheme Language Report License", + "licenseId": "SchemeReport", + "seeAlso": [], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Sendmail.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Sendmail.json", + "referenceNumber": 18, + "name": 
"Sendmail License", + "licenseId": "Sendmail", + "seeAlso": [ + "http://www.sendmail.com/pdfs/open_source/sendmail_license.pdf", + "https://web.archive.org/web/20160322142305/https://www.sendmail.com/pdfs/open_source/sendmail_license.pdf" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Sendmail-8.23.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Sendmail-8.23.json", + "referenceNumber": 344, + "name": "Sendmail License 8.23", + "licenseId": "Sendmail-8.23", + "seeAlso": [ + "https://www.proofpoint.com/sites/default/files/sendmail-license.pdf", + "https://web.archive.org/web/20181003101040/https://www.proofpoint.com/sites/default/files/sendmail-license.pdf" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/SGI-B-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SGI-B-1.0.json", + "referenceNumber": 122, + "name": "SGI Free Software License B v1.0", + "licenseId": "SGI-B-1.0", + "seeAlso": [ + "http://oss.sgi.com/projects/FreeB/SGIFreeSWLicB.1.0.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/SGI-B-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SGI-B-1.1.json", + "referenceNumber": 330, + "name": "SGI Free Software License B v1.1", + "licenseId": "SGI-B-1.1", + "seeAlso": [ + "http://oss.sgi.com/projects/FreeB/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/SGI-B-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SGI-B-2.0.json", + "referenceNumber": 278, + "name": "SGI Free Software License B v2.0", + "licenseId": "SGI-B-2.0", + "seeAlso": [ + "http://oss.sgi.com/projects/FreeB/SGIFreeSWLicB.2.0.pdf" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/SGP4.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SGP4.json", + "referenceNumber": 520, + "name": "SGP4 Permission Notice", + "licenseId": "SGP4", + "seeAlso": [ + "https://celestrak.org/publications/AIAA/2006-6753/faq.php" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/SHL-0.5.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SHL-0.5.json", + "referenceNumber": 511, + "name": "Solderpad Hardware License v0.5", + "licenseId": "SHL-0.5", + "seeAlso": [ + "https://solderpad.org/licenses/SHL-0.5/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/SHL-0.51.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SHL-0.51.json", + "referenceNumber": 492, + "name": "Solderpad Hardware License, Version 0.51", + "licenseId": "SHL-0.51", + "seeAlso": [ + "https://solderpad.org/licenses/SHL-0.51/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/SimPL-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SimPL-2.0.json", + "referenceNumber": 387, + "name": "Simple Public License 2.0", + "licenseId": "SimPL-2.0", + "seeAlso": [ + "https://opensource.org/licenses/SimPL-2.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/SISSL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SISSL.json", + "referenceNumber": 186, + "name": "Sun Industry Standards Source License v1.1", + "licenseId": "SISSL", + "seeAlso": [ + 
"http://www.openoffice.org/licenses/sissl_license.html", + "https://opensource.org/licenses/SISSL" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/SISSL-1.2.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SISSL-1.2.json", + "referenceNumber": 267, + "name": "Sun Industry Standards Source License v1.2", + "licenseId": "SISSL-1.2", + "seeAlso": [ + "http://gridscheduler.sourceforge.net/Gridengine_SISSL_license.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Sleepycat.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Sleepycat.json", + "referenceNumber": 162, + "name": "Sleepycat License", + "licenseId": "Sleepycat", + "seeAlso": [ + "https://opensource.org/licenses/Sleepycat" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/SMLNJ.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SMLNJ.json", + "referenceNumber": 243, + "name": "Standard ML of New Jersey License", + "licenseId": "SMLNJ", + "seeAlso": [ + "https://www.smlnj.org/license.html" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/SMPPL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SMPPL.json", + "referenceNumber": 399, + "name": "Secure Messaging Protocol Public License", + "licenseId": "SMPPL", + "seeAlso": [ + "https://github.com/dcblake/SMP/blob/master/Documentation/License.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/SNIA.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SNIA.json", + "referenceNumber": 334, + "name": "SNIA Public License 1.1", + "licenseId": "SNIA", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/SNIA_Public_License" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/snprintf.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/snprintf.json", + "referenceNumber": 142, + "name": "snprintf License", + "licenseId": "snprintf", + "seeAlso": [ + "https://github.com/openssh/openssh-portable/blob/master/openbsd-compat/bsd-snprintf.c#L2" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Spencer-86.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Spencer-86.json", + "referenceNumber": 311, + "name": "Spencer License 86", + "licenseId": "Spencer-86", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Henry_Spencer_Reg-Ex_Library_License" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Spencer-94.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Spencer-94.json", + "referenceNumber": 394, + "name": "Spencer License 94", + "licenseId": "Spencer-94", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Henry_Spencer_Reg-Ex_Library_License", + "https://metacpan.org/release/KNOK/File-MMagic-1.30/source/COPYING#L28" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Spencer-99.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Spencer-99.json", + "referenceNumber": 164, + "name": "Spencer License 99", + "licenseId": "Spencer-99", + "seeAlso": [ + "http://www.opensource.apple.com/source/tcl/tcl-5/tcl/generic/regfronts.c" + ], + 
"isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/SPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SPL-1.0.json", + "referenceNumber": 441, + "name": "Sun Public License v1.0", + "licenseId": "SPL-1.0", + "seeAlso": [ + "https://opensource.org/licenses/SPL-1.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/SSH-OpenSSH.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SSH-OpenSSH.json", + "referenceNumber": 481, + "name": "SSH OpenSSH license", + "licenseId": "SSH-OpenSSH", + "seeAlso": [ + "https://github.com/openssh/openssh-portable/blob/1b11ea7c58cd5c59838b5fa574cd456d6047b2d4/LICENCE#L10" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/SSH-short.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SSH-short.json", + "referenceNumber": 151, + "name": "SSH short notice", + "licenseId": "SSH-short", + "seeAlso": [ + "https://github.com/openssh/openssh-portable/blob/1b11ea7c58cd5c59838b5fa574cd456d6047b2d4/pathnames.h", + "http://web.mit.edu/kolya/.f/root/athena.mit.edu/sipb.mit.edu/project/openssh/OldFiles/src/openssh-2.9.9p2/ssh-add.1", + "https://joinup.ec.europa.eu/svn/lesoll/trunk/italc/lib/src/dsa_key.cpp" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/SSPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SSPL-1.0.json", + "referenceNumber": 218, + "name": "Server Side Public License, v 1", + "licenseId": "SSPL-1.0", + "seeAlso": [ + "https://www.mongodb.com/licensing/server-side-public-license" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/StandardML-NJ.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/StandardML-NJ.json", + "referenceNumber": 299, + "name": "Standard ML of New Jersey License", + "licenseId": "StandardML-NJ", + "seeAlso": [ + "https://www.smlnj.org/license.html" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/SugarCRM-1.1.3.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SugarCRM-1.1.3.json", + "referenceNumber": 363, + "name": "SugarCRM Public License v1.1.3", + "licenseId": "SugarCRM-1.1.3", + "seeAlso": [ + "http://www.sugarcrm.com/crm/SPL" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/SunPro.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SunPro.json", + "referenceNumber": 495, + "name": "SunPro License", + "licenseId": "SunPro", + "seeAlso": [ + "https://github.com/freebsd/freebsd-src/blob/main/lib/msun/src/e_acosh.c", + "https://github.com/freebsd/freebsd-src/blob/main/lib/msun/src/e_lgammal.c" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/SWL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SWL.json", + "referenceNumber": 180, + "name": "Scheme Widget Library (SWL) Software License Agreement", + "licenseId": "SWL", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/SWL" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Symlinks.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Symlinks.json", + "referenceNumber": 259, + "name": "Symlinks License", + "licenseId": "Symlinks", + "seeAlso": [ + 
"https://www.mail-archive.com/debian-bugs-rc@lists.debian.org/msg11494.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/TAPR-OHL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/TAPR-OHL-1.0.json", + "referenceNumber": 496, + "name": "TAPR Open Hardware License v1.0", + "licenseId": "TAPR-OHL-1.0", + "seeAlso": [ + "https://www.tapr.org/OHL" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/TCL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/TCL.json", + "referenceNumber": 125, + "name": "TCL/TK License", + "licenseId": "TCL", + "seeAlso": [ + "http://www.tcl.tk/software/tcltk/license.html", + "https://fedoraproject.org/wiki/Licensing/TCL" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/TCP-wrappers.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/TCP-wrappers.json", + "referenceNumber": 84, + "name": "TCP Wrappers License", + "licenseId": "TCP-wrappers", + "seeAlso": [ + "http://rc.quest.com/topics/openssh/license.php#tcpwrappers" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/TermReadKey.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/TermReadKey.json", + "referenceNumber": 489, + "name": "TermReadKey License", + "licenseId": "TermReadKey", + "seeAlso": [ + "https://github.com/jonathanstowe/TermReadKey/blob/master/README#L9-L10" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/TMate.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/TMate.json", + "referenceNumber": 36, + "name": "TMate Open Source License", + "licenseId": "TMate", + "seeAlso": [ + "http://svnkit.com/license.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/TORQUE-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/TORQUE-1.1.json", + "referenceNumber": 416, + "name": "TORQUE v2.5+ Software License v1.1", + "licenseId": "TORQUE-1.1", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/TORQUEv1.1" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/TOSL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/TOSL.json", + "referenceNumber": 426, + "name": "Trusster Open Source License", + "licenseId": "TOSL", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/TOSL" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/TPDL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/TPDL.json", + "referenceNumber": 432, + "name": "Time::ParseDate License", + "licenseId": "TPDL", + "seeAlso": [ + "https://metacpan.org/pod/Time::ParseDate#LICENSE" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/TPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/TPL-1.0.json", + "referenceNumber": 221, + "name": "THOR Public License 1.0", + "licenseId": "TPL-1.0", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing:ThorPublicLicense" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/TTWL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/TTWL.json", + "referenceNumber": 403, + "name": "Text-Tabs+Wrap License", + "licenseId": "TTWL", + "seeAlso": [ + 
"https://fedoraproject.org/wiki/Licensing/TTWL", + "https://github.com/ap/Text-Tabs/blob/master/lib.modern/Text/Tabs.pm#L148" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/TU-Berlin-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/TU-Berlin-1.0.json", + "referenceNumber": 91, + "name": "Technische Universitaet Berlin License 1.0", + "licenseId": "TU-Berlin-1.0", + "seeAlso": [ + "https://github.com/swh/ladspa/blob/7bf6f3799fdba70fda297c2d8fd9f526803d9680/gsm/COPYRIGHT" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/TU-Berlin-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/TU-Berlin-2.0.json", + "referenceNumber": 326, + "name": "Technische Universitaet Berlin License 2.0", + "licenseId": "TU-Berlin-2.0", + "seeAlso": [ + "https://github.com/CorsixTH/deps/blob/fd339a9f526d1d9c9f01ccf39e438a015da50035/licences/libgsm.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/UCAR.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/UCAR.json", + "referenceNumber": 454, + "name": "UCAR License", + "licenseId": "UCAR", + "seeAlso": [ + "https://github.com/Unidata/UDUNITS-2/blob/master/COPYRIGHT" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/UCL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/UCL-1.0.json", + "referenceNumber": 414, + "name": "Upstream Compatibility License v1.0", + "licenseId": "UCL-1.0", + "seeAlso": [ + "https://opensource.org/licenses/UCL-1.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/Unicode-DFS-2015.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Unicode-DFS-2015.json", + "referenceNumber": 291, + "name": "Unicode License Agreement - Data Files and Software (2015)", + "licenseId": "Unicode-DFS-2015", + "seeAlso": [ + "https://web.archive.org/web/20151224134844/http://unicode.org/copyright.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Unicode-DFS-2016.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Unicode-DFS-2016.json", + "referenceNumber": 544, + "name": "Unicode License Agreement - Data Files and Software (2016)", + "licenseId": "Unicode-DFS-2016", + "seeAlso": [ + "https://www.unicode.org/license.txt", + "http://web.archive.org/web/20160823201924/http://www.unicode.org/copyright.html#License", + "http://www.unicode.org/copyright.html" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/Unicode-TOU.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Unicode-TOU.json", + "referenceNumber": 268, + "name": "Unicode Terms of Use", + "licenseId": "Unicode-TOU", + "seeAlso": [ + "http://web.archive.org/web/20140704074106/http://www.unicode.org/copyright.html", + "http://www.unicode.org/copyright.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/UnixCrypt.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/UnixCrypt.json", + "referenceNumber": 47, + "name": "UnixCrypt License", + "licenseId": "UnixCrypt", + "seeAlso": [ + "https://foss.heptapod.net/python-libs/passlib/-/blob/branch/stable/LICENSE#L70", + "https://opensource.apple.com/source/JBoss/JBoss-737/jboss-all/jetty/src/main/org/mortbay/util/UnixCrypt.java.auto.html", + 
"https://archive.eclipse.org/jetty/8.0.1.v20110908/xref/org/eclipse/jetty/http/security/UnixCrypt.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Unlicense.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Unlicense.json", + "referenceNumber": 137, + "name": "The Unlicense", + "licenseId": "Unlicense", + "seeAlso": [ + "https://unlicense.org/" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/UPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/UPL-1.0.json", + "referenceNumber": 204, + "name": "Universal Permissive License v1.0", + "licenseId": "UPL-1.0", + "seeAlso": [ + "https://opensource.org/licenses/UPL" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Vim.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Vim.json", + "referenceNumber": 526, + "name": "Vim License", + "licenseId": "Vim", + "seeAlso": [ + "http://vimdoc.sourceforge.net/htmldoc/uganda.html" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/VOSTROM.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/VOSTROM.json", + "referenceNumber": 6, + "name": "VOSTROM Public License for Open Source", + "licenseId": "VOSTROM", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/VOSTROM" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/VSL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/VSL-1.0.json", + "referenceNumber": 153, + "name": "Vovida Software License v1.0", + "licenseId": "VSL-1.0", + "seeAlso": [ + "https://opensource.org/licenses/VSL-1.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/W3C.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/W3C.json", + "referenceNumber": 335, + "name": "W3C Software Notice and License (2002-12-31)", + "licenseId": "W3C", + "seeAlso": [ + "http://www.w3.org/Consortium/Legal/2002/copyright-software-20021231.html", + "https://opensource.org/licenses/W3C" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/W3C-19980720.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/W3C-19980720.json", + "referenceNumber": 408, + "name": "W3C Software Notice and License (1998-07-20)", + "licenseId": "W3C-19980720", + "seeAlso": [ + "http://www.w3.org/Consortium/Legal/copyright-software-19980720.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/W3C-20150513.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/W3C-20150513.json", + "referenceNumber": 9, + "name": "W3C Software Notice and Document License (2015-05-13)", + "licenseId": "W3C-20150513", + "seeAlso": [ + "https://www.w3.org/Consortium/Legal/2015/copyright-software-and-document" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/w3m.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/w3m.json", + "referenceNumber": 32, + "name": "w3m License", + "licenseId": "w3m", + "seeAlso": [ + "https://github.com/tats/w3m/blob/master/COPYING" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Watcom-1.0.html", + "isDeprecatedLicenseId": false, + 
"detailsUrl": "https://spdx.org/licenses/Watcom-1.0.json", + "referenceNumber": 185, + "name": "Sybase Open Watcom Public License 1.0", + "licenseId": "Watcom-1.0", + "seeAlso": [ + "https://opensource.org/licenses/Watcom-1.0" + ], + "isOsiApproved": true, + "isFsfLibre": false + }, + { + "reference": "https://spdx.org/licenses/Widget-Workshop.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Widget-Workshop.json", + "referenceNumber": 364, + "name": "Widget Workshop License", + "licenseId": "Widget-Workshop", + "seeAlso": [ + "https://github.com/novnc/noVNC/blob/master/core/crypto/des.js#L24" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Wsuipa.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Wsuipa.json", + "referenceNumber": 440, + "name": "Wsuipa License", + "licenseId": "Wsuipa", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Wsuipa" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/WTFPL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/WTFPL.json", + "referenceNumber": 513, + "name": "Do What The F*ck You Want To Public License", + "licenseId": "WTFPL", + "seeAlso": [ + "http://www.wtfpl.net/about/", + "http://sam.zoy.org/wtfpl/COPYING" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/wxWindows.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/wxWindows.json", + "referenceNumber": 57, + "name": "wxWindows Library License", + "licenseId": "wxWindows", + "seeAlso": [ + "https://opensource.org/licenses/WXwindows" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/X11.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/X11.json", + "referenceNumber": 503, + "name": "X11 License", + "licenseId": "X11", + "seeAlso": [ + "http://www.xfree86.org/3.3.6/COPYRIGHT2.html#3" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/X11-distribute-modifications-variant.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/X11-distribute-modifications-variant.json", + "referenceNumber": 288, + "name": "X11 License Distribution Modification Variant", + "licenseId": "X11-distribute-modifications-variant", + "seeAlso": [ + "https://github.com/mirror/ncurses/blob/master/COPYING" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Xdebug-1.03.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Xdebug-1.03.json", + "referenceNumber": 127, + "name": "Xdebug License v 1.03", + "licenseId": "Xdebug-1.03", + "seeAlso": [ + "https://github.com/xdebug/xdebug/blob/master/LICENSE" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Xerox.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Xerox.json", + "referenceNumber": 179, + "name": "Xerox License", + "licenseId": "Xerox", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Xerox" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Xfig.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Xfig.json", + "referenceNumber": 239, + "name": "Xfig License", + "licenseId": "Xfig", + "seeAlso": [ + "https://github.com/Distrotech/transfig/blob/master/transfig/transfig.c", + 
"https://fedoraproject.org/wiki/Licensing:MIT#Xfig_Variant", + "https://sourceforge.net/p/mcj/xfig/ci/master/tree/src/Makefile.am" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/XFree86-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/XFree86-1.1.json", + "referenceNumber": 138, + "name": "XFree86 License 1.1", + "licenseId": "XFree86-1.1", + "seeAlso": [ + "http://www.xfree86.org/current/LICENSE4.html" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/xinetd.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/xinetd.json", + "referenceNumber": 312, + "name": "xinetd License", + "licenseId": "xinetd", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Xinetd_License" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/xlock.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/xlock.json", + "referenceNumber": 343, + "name": "xlock License", + "licenseId": "xlock", + "seeAlso": [ + "https://fossies.org/linux/tiff/contrib/ras/ras2tif.c" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Xnet.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Xnet.json", + "referenceNumber": 119, + "name": "X.Net License", + "licenseId": "Xnet", + "seeAlso": [ + "https://opensource.org/licenses/Xnet" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/xpp.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/xpp.json", + "referenceNumber": 407, + "name": "XPP License", + "licenseId": "xpp", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/xpp" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/XSkat.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/XSkat.json", + "referenceNumber": 43, + "name": "XSkat License", + "licenseId": "XSkat", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/XSkat_License" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/YPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/YPL-1.0.json", + "referenceNumber": 75, + "name": "Yahoo! Public License v1.0", + "licenseId": "YPL-1.0", + "seeAlso": [ + "http://www.zimbra.com/license/yahoo_public_license_1.0.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/YPL-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/YPL-1.1.json", + "referenceNumber": 215, + "name": "Yahoo! 
Public License v1.1", + "licenseId": "YPL-1.1", + "seeAlso": [ + "http://www.zimbra.com/license/yahoo_public_license_1.1.html" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Zed.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Zed.json", + "referenceNumber": 532, + "name": "Zed License", + "licenseId": "Zed", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Zed" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Zend-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Zend-2.0.json", + "referenceNumber": 374, + "name": "Zend License v2.0", + "licenseId": "Zend-2.0", + "seeAlso": [ + "https://web.archive.org/web/20130517195954/http://www.zend.com/license/2_00.txt" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Zimbra-1.3.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Zimbra-1.3.json", + "referenceNumber": 107, + "name": "Zimbra Public License v1.3", + "licenseId": "Zimbra-1.3", + "seeAlso": [ + "http://web.archive.org/web/20100302225219/http://www.zimbra.com/license/zimbra-public-license-1-3.html" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Zimbra-1.4.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Zimbra-1.4.json", + "referenceNumber": 121, + "name": "Zimbra Public License v1.4", + "licenseId": "Zimbra-1.4", + "seeAlso": [ + "http://www.zimbra.com/legal/zimbra-public-license-1-4" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Zlib.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Zlib.json", + "referenceNumber": 70, + "name": "zlib License", + "licenseId": "Zlib", + "seeAlso": [ + "http://www.zlib.net/zlib_license.html", + "https://opensource.org/licenses/Zlib" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/zlib-acknowledgement.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/zlib-acknowledgement.json", + "referenceNumber": 362, + "name": "zlib/libpng License with Acknowledgement", + "licenseId": "zlib-acknowledgement", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/ZlibWithAcknowledgement" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/ZPL-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/ZPL-1.1.json", + "referenceNumber": 498, + "name": "Zope Public License 1.1", + "licenseId": "ZPL-1.1", + "seeAlso": [ + "http://old.zope.org/Resources/License/ZPL-1.1" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/ZPL-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/ZPL-2.0.json", + "referenceNumber": 83, + "name": "Zope Public License 2.0", + "licenseId": "ZPL-2.0", + "seeAlso": [ + "http://old.zope.org/Resources/License/ZPL-2.0", + "https://opensource.org/licenses/ZPL-2.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/ZPL-2.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/ZPL-2.1.json", + "referenceNumber": 101, + "name": "Zope Public License 2.1", + "licenseId": "ZPL-2.1", + "seeAlso": [ + "http://old.zope.org/Resources/ZPL/" + ], + "isOsiApproved": 
true, + "isFsfLibre": true + } + ], + "releaseDate": "2023-06-18" } \ No newline at end of file diff --git a/bioimageio/spec/static/tag_categories.json b/bioimageio/spec/static/tag_categories.json new file mode 100644 index 000000000..a84ea805b --- /dev/null +++ b/bioimageio/spec/static/tag_categories.json @@ -0,0 +1,64 @@ +{ + "model": { + "modality": [ + "electron-microscopy", + "cryo-electron-microscopy", + "fluorescence-light-microscopy", + "transmission-light-microscopy", + "super-resolution-microscopy", + "x-ray-microscopy", + "force-microscopy", + "high-content-imaging", + "whole-slide-imaging", + "other" + ], + "dims": [ + "2d", + "3d", + "2d-t", + "3d-t" + ], + "content": [ + "cells", + "nuclei", + "extracellular-vesicles", + "tissue", + "plant", + "mitochondria", + "vasculature", + "cell-membrane", + "brain", + "whole-organism" + ], + "framework": [ + "tensorflow", + "pytorch", + "tensorflow.js" + ], + "software": [ + "ilastik", + "imagej", + "fiji", + "imjoy", + "deepimagej", + "n2v" + ], + "network": [ + "unet", + "densenet", + "resnet", + "inception", + "shufflenet" + ], + "task": [ + "semantic-segmentation", + "instance-segmentation", + "object-detection", + "image-classification", + "denoising", + "image-restoration", + "image-reconstruction", + "in-silico-labeling" + ] + } +} \ No newline at end of file diff --git a/bioimageio/spec/summary.py b/bioimageio/spec/summary.py new file mode 100644 index 000000000..94d97c5e1 --- /dev/null +++ b/bioimageio/spec/summary.py @@ -0,0 +1,229 @@ +from itertools import chain +from types import MappingProxyType +from typing import Any, Iterable, List, Literal, Mapping, Tuple, Union + +from pydantic import ( + BaseModel, + Field, + model_validator, +) +from pydantic_core.core_schema import ErrorType +from typing_extensions import TypedDict, assert_never + +from ._internal.constants import VERSION +from ._internal.warning_levels import ( + ALERT, + ALERT_NAME, + ERROR, + ERROR_NAME, + INFO, + INFO_NAME, + WARNING, + WARNING_NAME, + WarningLevel, + WarningSeverity, +) + +Loc = Tuple[Union[int, str], ...] 
+"""location of error/warning in a nested data structure""" + +WarningSeverityName = Literal["info", "warning", "alert"] +WarningLevelName = Literal[WarningSeverityName, "error"] + +WARNING_SEVERITY_TO_NAME: Mapping[WarningSeverity, WarningSeverityName] = ( + MappingProxyType({INFO: INFO_NAME, WARNING: WARNING_NAME, ALERT: ALERT_NAME}) +) +WARNING_LEVEL_TO_NAME: Mapping[WarningLevel, WarningLevelName] = MappingProxyType( + {**WARNING_SEVERITY_TO_NAME, ERROR: ERROR_NAME} +) +WARNING_NAME_TO_LEVEL: Mapping[WarningLevelName, WarningLevel] = MappingProxyType( + {v: k for k, v in WARNING_LEVEL_TO_NAME.items()} +) + + +class ValidationEntry(BaseModel): + loc: Loc + msg: str + type: Union[ErrorType, str] + + +class ErrorEntry(ValidationEntry): + traceback: List[str] = Field(default_factory=list) + + +class WarningEntry(ValidationEntry): + severity: WarningSeverity = WARNING + severity_name: WarningSeverityName = WARNING_NAME + + @model_validator(mode="before") + @classmethod + def sync_severity_with_severity_name( + cls, data: Union[Mapping[Any, Any], Any] + ) -> Any: + if isinstance(data, dict): + data = dict(data) + assert isinstance(data, dict) + if ( + "severity" in data + and "severity_name" not in data + and data["severity"] in WARNING_SEVERITY_TO_NAME + ): + data["severity_name"] = WARNING_SEVERITY_TO_NAME[data["severity"]] + + if ( + "severity" in data + and "severity_name" not in data + and data["severity"] in WARNING_SEVERITY_TO_NAME + ): + data["severity"] = WARNING_NAME_TO_LEVEL[data["severity_name"]] + + return data + + +def format_loc(loc: Loc) -> str: + if not loc: + loc = ("__root__",) + + loc_str = ".".join(f"({x})" if x[0].isupper() else x for x in map(str, loc)) + + # additional field validation can make the location information quite convoluted, e.g. 
+ # `weights.pytorch_state_dict.dependencies.source.function-after[validate_url_ok(), url['http','https']]` Input should be a valid URL, relative URL without a base + # therefore we remove the `.function-after[validate_url_ok(), url['http','https']]` here + brief_loc_str, *_ = loc_str.split(".function-after") + return f"`{brief_loc_str}`" + + +class InstalledPackage(TypedDict): + name: str + version: str + + +class ValidationDetail(BaseModel, extra="allow"): + name: str + status: Literal["passed", "failed"] + errors: List[ErrorEntry] = Field(default_factory=list) + warnings: List[WarningEntry] = Field(default_factory=list) + + def __str__(self): + return f"{self.__class__.__name__}:\n" + self.format() + + @property + def status_icon(self): + if self.status == "passed": + return "✔️" + else: + return "❌" + + def format(self, hide_tracebacks: bool = False, root_loc: Loc = ()) -> str: + indent = " " if root_loc else "" + errs_wrns = self._format_errors_and_warnings( + hide_tracebacks=hide_tracebacks, root_loc=root_loc + ) + return f"{indent}{self.status_icon} {self.name.strip('.')}: {self.status}{errs_wrns}" + + def _format_errors_and_warnings(self, hide_tracebacks: bool, root_loc: Loc): + indent = " " if root_loc else "" + if hide_tracebacks: + tbs = [""] * len(self.errors) + else: + tbs = [ + ("\n Traceback:\n " if e.traceback else "") + + "\n ".join(e.traceback) + for e in self.errors + ] + + def join_parts(parts: Iterable[Tuple[str, str]]): + last_loc = None + lines: List[str] = [] + for loc, msg in parts: + if loc == last_loc: + lines.append(f"\n {loc} {msg}") + else: + lines.append(f"\n- {loc} {msg}") + + last_loc = loc + + return "".join(lines) + + es = join_parts( + (format_loc(root_loc + e.loc), f"{e.msg}{tb}") + for e, tb in zip(self.errors, tbs) + ) + ws = join_parts((format_loc(root_loc + w.loc), w.msg) for w in self.warnings) + + # emit the errors and warnings sections independently; either may be empty + return (f"\n{indent}errors:\n{es}" if es else "") + ( + f"\n{indent}warnings:\n{ws}" if ws else "" + ) + + +class ValidationSummary(BaseModel, extra="allow"): + name: str + source_name: str + status: Literal["passed", "failed"] + details: List[ValidationDetail] + env: List[InstalledPackage] = Field( + default_factory=lambda: [ + InstalledPackage(name="bioimageio.spec", version=VERSION) + ] + ) + """list of selected, relevant package versions""" + + @property + def status_icon(self): + if self.status == "passed": + return "✔️" + else: + return "❌" + + @property + def errors(self) -> List[ErrorEntry]: + return list(chain.from_iterable(d.errors for d in self.details)) + + @property + def warnings(self) -> List[WarningEntry]: + return list(chain.from_iterable(d.warnings for d in self.details)) + + def __str__(self): + return f"{self.__class__.__name__}:\n" + self.format() + + def _format_env(self): + if not self.env: + return "" + + package_w = max(len(p) for p in [e["name"] for e in self.env] + ["package"]) + version_w = max(len(v) for v in [e["version"] for e in self.env] + ["version"]) + + return ( + "\n" + f"| {'package'.center(package_w)} | {'version'.center(version_w)} |\n" + f"| {'---'.center(package_w)} | {'---'.center(version_w)} |\n" + ) + "".join( + f"| {e['name'].ljust(package_w)} | {e['version'].ljust(version_w)} |\n" + for e in self.env + ) + + def format( + self, + hide_tracebacks: bool = False, + hide_source: bool = False, + hide_env: bool = False, + root_loc: Loc = (), + ) -> str: + indent = " " if root_loc else "" + src = "" if hide_source else f"\n{indent}source: {self.source_name}" + env = "" if hide_env else
self._format_env() + details = f"\n{indent}" + f"\n{indent}".join( + d.format(hide_tracebacks=hide_tracebacks, root_loc=root_loc) + for d in self.details + ) + return f"{indent}{self.status_icon} {self.name.strip('.')}: {self.status}{src}{env}{details}" + + def add_detail(self, detail: ValidationDetail): + if detail.status == "failed": + self.status = "failed" + elif detail.status != "passed": + assert_never(detail.status) + + self.details.append(detail) diff --git a/bioimageio/spec/utils.py b/bioimageio/spec/utils.py new file mode 100644 index 000000000..de45d966a --- /dev/null +++ b/bioimageio/spec/utils.py @@ -0,0 +1,3 @@ +from ._internal.io import download as download +from ._internal.io_utils import load_array as load_array +from ._internal.io_utils import save_array as save_array diff --git a/bioimageio/spec/v.py b/bioimageio/spec/v.py deleted file mode 100644 index f03752036..000000000 --- a/bioimageio/spec/v.py +++ /dev/null @@ -1,5 +0,0 @@ -import json -import pathlib - -with (pathlib.Path(__file__).parent / "VERSION").open() as f: - __version__ = json.load(f)["version"] diff --git a/conda-recipe/meta.yaml b/conda-recipe/meta.yaml index eb67a2131..9b4f6a6b2 100644 --- a/conda-recipe/meta.yaml +++ b/conda-recipe/meta.yaml @@ -1,3 +1,5 @@ +# possibly superseded by recipe.yaml +# TODO: decide on a package builder and clean up {% set setup_py_data = load_setup_py_data() %} package: @@ -9,21 +11,17 @@ source: requirements: build: - - python >=3.7,<3.10 + - python >=3.8,<3.13 - pip run: - - python >=3.7,<3.10 + - python >=3.8,<3.13 {% for dep in setup_py_data['install_requires'] %} - - {{ dep.replace(";platform_system!='Emscripten'", "").lower() }} + - {{ dep.lower() }} {% endfor %} build: noarch: python - entry_points: - {% for ep in setup_py_data['entry_points']['console_scripts'] %} - - {{ ep }} - {% endfor %} script: - python -m pip install --no-deps --ignore-installed . @@ -32,16 +30,17 @@ test: - bioimageio.spec source_files: - tests - - example_specs + - example_descriptions requires: - {% for dep in setup_py_data['extras_require']['test'] %} + {% for dep in setup_py_data['extras_require']['dev'] %} - {{ dep.replace('torch', 'pytorch').lower() }} {% endfor %} + - pytest-xdist[psutil] # somehow not installed from setup_py_data commands: - - pytest . + - pytest -n auto --capture=no about: home: https://github.com/bioimage-io/spec-bioimage-io - summary: BioImage.IO Spec Validator and specifications package + summary: bioimage.io specifications package license: MIT license_file: LICENSE diff --git a/conda-recipe/recipe.yaml b/conda-recipe/recipe.yaml new file mode 100644 index 000000000..84256aba7 --- /dev/null +++ b/conda-recipe/recipe.yaml @@ -0,0 +1,60 @@ +package: + name: bioimageio.spec + version: "0.5.0" + +source: + path: .. + +requirements: + host: + # {HOST_DEPENDENCIES} + - pip + - python >=3.8,<3.13 + - setuptools + - wheel + run: + # {RUN_DEPENDENCIES} + - "annotated-types>=0.5.0" + - "email-validator" + - "imageio" + - "loguru" + - "numpy>=1.21" + - "packaging>=17.0" + - "pooch" + - "pydantic-settings" + - "pydantic>=2.6.3" + - "python-dateutil" + - "ruyaml" + - "tqdm" + - "typing-extensions" + +build: + noarch: python + script: python -m pip install --no-deps --ignore-installed .
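The new `summary.py` module added above is easiest to grasp from the consumer side. The following is a minimal usage sketch, not part of this diff: it assumes this branch is installed so that `bioimageio.spec.summary` is importable, and it builds a summary by hand (the source name and error message are made up) the way validation code might:

```python
from bioimageio.spec.summary import (
    ErrorEntry,
    ValidationDetail,
    ValidationSummary,
)

# start from a passing summary with one successful detail
summary = ValidationSummary(
    name="bioimageio validation",
    source_name="my_model/bioimageio.yaml",  # hypothetical source
    status="passed",
    details=[ValidationDetail(name="format compatibility", status="passed")],
)

# a failed detail flips the overall status to "failed" via add_detail
summary.add_detail(
    ValidationDetail(
        name="weights check",
        status="failed",
        errors=[
            ErrorEntry(loc=("weights",), msg="file not found", type="value_error")
        ],
    )
)

assert summary.status == "failed"
# renders a report with ✔️/❌ icons, the env table, and per-detail errors/warnings
print(summary.format(hide_tracebacks=True))
```

Note that `add_detail` only escalates: a later passing detail never resets a failed summary back to passed.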
+ +tests: + - script: + - pytest -n auto --capture=no + requirements: + run: + # {TEST_DEPENDENCIES} + - deepdiff + - filelock + - jsonschema + - pytest + - pytest-xdist + - psutil + files: + source: + - tests + - example_descriptions + - python: + imports: + - bioimageio.spec + +about: + repository: https://github.com/bioimage-io/spec-bioimage-io + summary: bioimage.io specifications package + license: MIT + license_file: LICENSE diff --git a/dev/env.yaml b/dev/env.yaml new file mode 100644 index 000000000..8e5520bda --- /dev/null +++ b/dev/env.yaml @@ -0,0 +1,21 @@ +name: biospec +channels: + - conda-forge + - defaults +dependencies: + - python=3.8 + - numpy + - deepdiff + - pydantic[version='>=2.0.1'] + - email-validator + - ruyaml + - packaging[version='>=17.0'] + - annotated-types + - python-dateutil + - black + - pyright + - pooch + - pre-commit + - typing-extensions + - pytest + - lxml diff --git a/example_descriptions/collections/partner_collection/bioimageio.yaml b/example_descriptions/collections/partner_collection/bioimageio.yaml new file mode 100644 index 000000000..66bf03c72 --- /dev/null +++ b/example_descriptions/collections/partner_collection/bioimageio.yaml @@ -0,0 +1,15 @@ +format_version: 0.2.4 +type: collection +name: Partner Collection +description: "Resources for BioImage.IO curated by the partner team." +authors: + - name: Fynn Beuttenmueller + github_user: fynnbe +cite: + - text: bioimage.io + url: "https://www.biorxiv.org/content/10.1101/2022.06.07.495102v1" +tags: [bioimage.io, partner-software] +id: partner +license: MIT +collection: + - rdf_source: datasets/dummy-dataset/dummy_entry.yaml diff --git a/example_specs/collections/partner_collection/datasets/dummy-dataset/README.md b/example_descriptions/collections/partner_collection/datasets/dummy-dataset/README.md similarity index 100% rename from example_specs/collections/partner_collection/datasets/dummy-dataset/README.md rename to example_descriptions/collections/partner_collection/datasets/dummy-dataset/README.md diff --git a/example_specs/collections/partner_collection/datasets/dummy-dataset/rdf.yaml b/example_descriptions/collections/partner_collection/datasets/dummy-dataset/dummy_entry.yaml similarity index 60% rename from example_specs/collections/partner_collection/datasets/dummy-dataset/rdf.yaml rename to example_descriptions/collections/partner_collection/datasets/dummy-dataset/dummy_entry.yaml index 7cc9b8c47..4c324f9d7 100644 --- a/example_specs/collections/partner_collection/datasets/dummy-dataset/rdf.yaml +++ b/example_descriptions/collections/partner_collection/datasets/dummy-dataset/dummy_entry.yaml @@ -1,12 +1,8 @@ -authors: -- name: Fynn Beuttenmüller -cite: [] -covers: [] description: Dummy dataset for testing purposes only.
documentation: README.md -format_version: 0.2.2 -license: MIT +format_version: 0.2.4 name: Dummy Data tags: [dummy] type: dataset id: dummy-dataset +attachments: {} diff --git a/example_specs/collections/unet2d_nuclei_broad_coll/README.md b/example_descriptions/collections/unet2d_nuclei_broad_coll/README.md similarity index 100% rename from example_specs/collections/unet2d_nuclei_broad_coll/README.md rename to example_descriptions/collections/unet2d_nuclei_broad_coll/README.md diff --git a/example_descriptions/collections/unet2d_nuclei_broad_coll/bioimageio.yaml b/example_descriptions/collections/unet2d_nuclei_broad_coll/bioimageio.yaml new file mode 100644 index 000000000..26a29195a --- /dev/null +++ b/example_descriptions/collections/unet2d_nuclei_broad_coll/bioimageio.yaml @@ -0,0 +1,93 @@ +format_version: 0.2.4 +type: collection + +name: UNet 2D Nuclei Broad + +description: A 2d U-Net trained on the nuclei broad dataset. +authors: + - name: "Constantin Pape;@bioimage-io" + affiliation: "EMBL Heidelberg" + orcid: "0000-0001-6562-7187" + - name: "Fynn Beuttenmueller" + affiliation: "EMBL Heidelberg" + orcid: "0000-0002-8567-6389" +maintainers: + - name: "Constantin Pape" + github_user: constantinpape + +cite: + - text: "Ronneberger, Olaf et al. U-net: Convolutional networks for biomedical image segmentation. MICCAI 2015." + doi: 10.1007/978-3-319-24574-4_28 + - text: "2018 Data Science Bowl" + url: https://www.kaggle.com/c/data-science-bowl-2018 + +git_repo: https://github.com/bioimage-io/spec-bioimage-io/tree/main/example_specs/models/unet2d_nuclei_broad +# tags: [unet2d, pytorch, nucleus, segmentation, dsb2018] # tags is optional for collection +license: MIT + +documentation: README.md +covers: [cover0.png] +timestamp: 2019-12-11T12:22:32Z + +inputs: + - name: raw + description: raw input + axes: bcyx + data_type: float32 + data_range: ["-inf", "inf"] + shape: [1, 1, 512, 512] + preprocessing: + - name: zero_mean_unit_variance + kwargs: + mode: per_sample + axes: yx + +outputs: + - name: probability + description: probability in [0,1] + axes: bcyx + data_type: float32 + data_range: ["-inf", "inf"] + halo: [0, 0, 32, 32] + shape: + reference_tensor: raw + scale: [1.0, 1.0, 1.0, 1.0] + offset: [0.0, 0.0, 0.0, 0.0] + +test_inputs: [test_input.npy] +test_outputs: [test_output.npy] + +sample_inputs: [test_input.npy] +sample_outputs: [test_output.npy] + +version: 1 + +collection: + # - id: with_rdf_source_url + # rdf_source: https://raw.githubusercontent.com/bioimage-io/spec-bioimage-io/main/example_specs/models/unet2d_nuclei_broad/rdf.yaml + # name: UNet 2D Nuclei Broad (latest) + - id: in_place_0.4.10 + format_version: 0.4.10 + type: model + weights: + pytorch_state_dict: + dependencies: conda:environment.yaml + authors: + - name: "Constantin Pape;@bioimage-io" + affiliation: "EMBL Heidelberg" + orcid: "0000-0001-6562-7187" + sha256: e4d3885bccbe41cbf6c1d825f3cd2b707c7021ead5593156007e407a16b27cf2 + source: https://zenodo.org/records/3446812/files/unet2d_weights.torch + architecture: unet2d.py:UNet2d + architecture_sha256: 7cdd8332dc3e3735e71c328f81b63a9ac86c028f80522312484ca9a4027d4ce1 + kwargs: { input_channels: 1, output_channels: 1 } + onnx: + sha256: f1f086d5e340f9d4d7001a1b62a2b835f9b87a2fb5452c4fe7d8cc821bdf539c + source: weights.onnx + opset_version: 12 + parent: pytorch_state_dict + torchscript: + sha256: 62fa1c39923bee7d58a192277e0dd58f2da9ee810662addadd0f44a3784d9210 + source: weights.pt + parent: pytorch_state_dict + attachments: {} diff --git 
a/example_specs/collections/unet2d_nuclei_broad_coll/cover0.png b/example_descriptions/collections/unet2d_nuclei_broad_coll/cover0.png similarity index 100% rename from example_specs/collections/unet2d_nuclei_broad_coll/cover0.png rename to example_descriptions/collections/unet2d_nuclei_broad_coll/cover0.png diff --git a/example_specs/collections/unet2d_nuclei_broad_coll/environment.yaml b/example_descriptions/collections/unet2d_nuclei_broad_coll/environment.yaml similarity index 100% rename from example_specs/collections/unet2d_nuclei_broad_coll/environment.yaml rename to example_descriptions/collections/unet2d_nuclei_broad_coll/environment.yaml diff --git a/example_specs/collections/unet2d_nuclei_broad_coll/test_input.npy b/example_descriptions/collections/unet2d_nuclei_broad_coll/test_input.npy similarity index 100% rename from example_specs/collections/unet2d_nuclei_broad_coll/test_input.npy rename to example_descriptions/collections/unet2d_nuclei_broad_coll/test_input.npy diff --git a/example_specs/collections/unet2d_nuclei_broad_coll/test_output.npy b/example_descriptions/collections/unet2d_nuclei_broad_coll/test_output.npy similarity index 100% rename from example_specs/collections/unet2d_nuclei_broad_coll/test_output.npy rename to example_descriptions/collections/unet2d_nuclei_broad_coll/test_output.npy diff --git a/example_specs/models/unet2d_nuclei_broad/unet2d.py b/example_descriptions/collections/unet2d_nuclei_broad_coll/unet2d.py similarity index 72% rename from example_specs/models/unet2d_nuclei_broad/unet2d.py rename to example_descriptions/collections/unet2d_nuclei_broad_coll/unet2d.py index fedc9be10..7fba44dd5 100644 --- a/example_specs/models/unet2d_nuclei_broad/unet2d.py +++ b/example_descriptions/collections/unet2d_nuclei_broad_coll/unet2d.py @@ -1,3 +1,4 @@ +# type: ignore import torch import torch.nn as nn @@ -9,7 +10,9 @@ def __init__(self, scale_factor, mode="bilinear"): self.mode = mode def forward(self, input): - return nn.functional.interpolate(input, scale_factor=self.scale_factor, mode=self.mode, align_corners=False) + return nn.functional.interpolate( + input, scale_factor=self.scale_factor, mode=self.mode, align_corners=False + ) class UNet2d(nn.Module): @@ -20,14 +23,22 @@ def __init__(self, input_channels, output_channels, training=False): self.n_levels = 3 self.encoders = nn.ModuleList( - [self.conv_layer(self.input_channels, 16), self.conv_layer(16, 32), self.conv_layer(32, 64)] + [ + self.conv_layer(self.input_channels, 16), + self.conv_layer(16, 32), + self.conv_layer(32, 64), + ] ) self.downsamplers = nn.ModuleList([self.downsampler()] * self.n_levels) self.base = self.conv_layer(64, 128) - self.decoders = nn.ModuleList([self.conv_layer(128, 64), self.conv_layer(64, 32), self.conv_layer(32, 16)]) - self.upsamplers = nn.ModuleList([self.upsampler(128, 64), self.upsampler(64, 32), self.upsampler(32, 16)]) + self.decoders = nn.ModuleList( + [self.conv_layer(128, 64), self.conv_layer(64, 32), self.conv_layer(32, 16)] + ) + self.upsamplers = nn.ModuleList( + [self.upsampler(128, 64), self.upsampler(64, 32), self.upsampler(32, 16)] + ) self.output = nn.Conv2d(16, self.output_channels, 1) self.training = training @@ -58,7 +69,9 @@ def forward(self, input): x = self.base(x) - for decoder, sampler, enc in zip(self.decoders, self.upsamplers, from_encoder[::-1]): + for decoder, sampler, enc in zip( + self.decoders, self.upsamplers, from_encoder[::-1] + ): x = sampler(x) x = torch.cat([enc, x], dim=1) x = decoder(x) diff --git 
a/example_specs/collections/unet2d_nuclei_broad_coll/weights.onnx b/example_descriptions/collections/unet2d_nuclei_broad_coll/weights.onnx similarity index 100% rename from example_specs/collections/unet2d_nuclei_broad_coll/weights.onnx rename to example_descriptions/collections/unet2d_nuclei_broad_coll/weights.onnx diff --git a/example_specs/collections/unet2d_nuclei_broad_coll/weights.pt b/example_descriptions/collections/unet2d_nuclei_broad_coll/weights.pt similarity index 100% rename from example_specs/collections/unet2d_nuclei_broad_coll/weights.pt rename to example_descriptions/collections/unet2d_nuclei_broad_coll/weights.pt diff --git a/example_descriptions/datasets/covid_if_training_data/bioimageio.yaml b/example_descriptions/datasets/covid_if_training_data/bioimageio.yaml new file mode 100644 index 000000000..2bb0fb6a0 --- /dev/null +++ b/example_descriptions/datasets/covid_if_training_data/bioimageio.yaml @@ -0,0 +1,26 @@ +type: dataset +format_version: 0.2.4 +authors: + - name: Constantin Pape + github_user: constantinpape +cite: + - doi: "10.1002/bies.202000257" + text: "Pape, Remme et al." +covers: + - "https://raw.githubusercontent.com/ilastik/bioimage-io-models/main/dataset_src/covid-if-cover0.jpg" + - "https://raw.githubusercontent.com/ilastik/bioimage-io-models/main/dataset_src/covid-if-cover1.jpg" + - "https://raw.githubusercontent.com/ilastik/bioimage-io-models/main/dataset_src/covid-if-cover2.jpg" +description: Training data for cell and nucleus segmentation as well as infection classification in IF data of Covid-19 infected cells. +documentation: https://raw.githubusercontent.com/ilastik/bioimage-io-models/main/dataset_src/covid-if.md +license: CC-BY-4.0 +name: Covid-IF Training Data +source: https://zenodo.org/records/5092850 +tags: + - high-content-imaging + - fluorescence-light-microscopy + - 2D + - cells + - nuclei + - covid19 + - semantic-segmentation + - instance-segmentation diff --git a/example_specs/rdf-examples.md b/example_descriptions/examples.md similarity index 87% rename from example_specs/rdf-examples.md rename to example_descriptions/examples.md index 0465aed54..29c8bd477 100644 --- a/example_specs/rdf-examples.md +++ b/example_descriptions/examples.md @@ -1,12 +1,14 @@ # Examples for describing the -### Describing applications -The RDF can be used to describe applications. To do so set the `type` field to `application`.\ +## Describing applications + +A bioimageio.yaml file can be used to describe applications. To do so set the `type` field to `application`.\ For regular software package with a downloadable file, you can set `download_url` to the downloadable file, for example, you can upload the executable files as Github release, deposit it on Zenodo, or even generate a sharable url from Dropbox/Google Drive.\ For web application, set `source` to the url of the web application. Users can then click and redirect to your web application. However, simple integration will not support features such as opening dataset or models with your application. -It is recommended to build BioEngine Apps such that users can directly try and use them in BioImage.IO. See [here](https://github.com/bioimage-io/bioimage.io/blob/main/docs/bioengine_apps/build-bioengine-apps.md) for more details.\ +It is recommended to build BioEngine Apps such that users can directly try and use them in bioimage.io. 
-It is recommended to build BioEngine Apps such that users can directly try and use them in BioImage.IO. See [here](https://github.com/bioimage-io/bioimage.io/blob/main/docs/bioengine_apps/build-bioengine-apps.md) for more details.\ +It is recommended to build BioEngine Apps such that users can directly try and use them in bioimage.io. See [here](https://github.com/bioimage-io/bioimage.io/blob/main/docs/bioengine_apps/build-bioengine-apps.md) for more details.\ Below is an example for [Kaibu](https://kaibu.org), which is a BioEngine/ImJoy compatible web application: + ```yaml id: kaibu name: Kaibu @@ -23,12 +25,14 @@ badges: label: Launch ImJoy url: https://imjoy.io/#/app?plugin=https://kaibu.org/#/app ``` + For more application examples, see the [manifest for ImJoy](https://github.com/imjoy-team/bioimage-io-models/blob/master/manifest.bioimage.io.yaml). -### Describing notebooks and scripts +## Describing notebooks and scripts Jupyter notebooks, Google Colab, or other types of executable notebooks or scripts are considered applications; therefore, you should use `type=application` and add additional tags. For example: -``` + +```yaml - type: application id: Notebook_fnet_3D_ZeroCostDL4Mic name: Label-free Prediction - fnet - (3D) ZeroCostDL4Mic @@ -36,10 +40,10 @@ Jupyter notebooks, Google Colab or other types of executable notebooks or script cite: - text: "von Chamier, L., Laine, R.F., Jukkala, J. et al. Democratising deep learning for microscopy with ZeroCostDL4Mic. Nat Commun 12, 2276 (2021). https://doi.org/10.1038/s41467-021-22518-0" doi: https://doi.org/10.1038/s41467-021-22518-0 - + - text: "Ounkomol, C., Seshamani, S., Maleckar, M.M. et al. Label-free prediction of three-dimensional fluorescence images from transmitted-light microscopy. Nat Methods 15, 917–920 (2018). https://doi.org/10.1038/s41592-018-0111-2" doi: https://doi.org/10.1038/s41592-018-0111-2 - + authors: - Lucas von Chamier and the ZeroCostDL4Mic Team covers: @@ -58,26 +62,27 @@ Jupyter notebooks, Google Colab or other types of executable notebooks or script - Dataset_fnet_3D_ZeroCostDL4Mic ``` - ### Describing datasets and other types + The RDF allows for the description of datasets (type=`dataset`) and other potential resources; you can set `source` and/or `download_url` to point to the resource, or use `attachments` to specify a list of associated files. For examples, see entries `dataset`/`notebook` in the [ZeroCostDL4Mic](https://github.com/HenriquesLab/ZeroCostDL4Mic/blob/master/manifest.bioimage.io.yaml) collection.
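To make the `source`/`attachments` options above concrete, here is a minimal hypothetical dataset sketch (the name, URL, and file names are placeholders; the Covid-IF dataset description added earlier in this diff is a complete real-world example):

```yaml
# Hypothetical dataset description (all values are placeholders)
type: dataset
name: My Training Data
description: Example dataset entry pointing to an externally hosted archive.
source: https://zenodo.org/record/0000000
attachments:
  files: # associated files listed alongside the main source
    - labels.csv
    - acquisition_notes.md
```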
- ### Describing models with the unspecific RDF (not recommended, use the Model RDF instead) -In general, it is discouraged to use the general RDF to describe AI models and we recommend to follow the [model RDF spec](#model-resource-description-file-specification) instead. However, in some cases, it is not possible to provide detailed fields defined in the [model RDF spec](#model-resource-description-file-specification), the general RDF can be used for discribing AI models. + +In general, it is discouraged to use the general RDF to describe AI models and we recommend following the [model spec](#model-resource-description-file-specification) instead. However, in some cases it is not possible to provide the detailed fields defined in the [model spec](#model-resource-description-file-specification); in such cases, the general RDF can be used to describe AI models. To do that, you need to first set the `type` field to `model`.\ -A basic integration would be simply provide a `download_url` to a zip file (for example, with the model weights, source code or executable binary file) hosted on Github releases, Dropbox, Google Drive etc. For example: +A basic integration would be to simply provide a `download_url` to a zip file (for example, with the model weights, source code or executable binary file) hosted on GitHub releases, Dropbox, Google Drive, etc. For example: + ```yaml download_url: https://zenodo.org/record/3446812/files/unet2d_weights.torch?download=1 ``` If the model is available as a GitHub repo, then provide the `git_repo` field: + ```yaml git_repo: https://github.com/my/model... ``` -Here an example of a general RDF describing a model (not recommended): -https://github.com/CellProfiling/HPA-model-zoo/blob/2f668d87defddc6c7cd156259a8be4146b665e72/manifest.bioimage.io.yaml#L33-L59 - +Here is an example of a general RDF describing a model (not recommended): + diff --git a/example_specs/models/hpa-densenet/README.md b/example_descriptions/models/hpa-densenet/README.md similarity index 100% rename from example_specs/models/hpa-densenet/README.md rename to example_descriptions/models/hpa-densenet/README.md diff --git a/example_specs/models/hpa-densenet/bestfitting-densenet-diagram.png b/example_descriptions/models/hpa-densenet/bestfitting-densenet-diagram.png similarity index 100% rename from example_specs/models/hpa-densenet/bestfitting-densenet-diagram.png rename to example_descriptions/models/hpa-densenet/bestfitting-densenet-diagram.png diff --git a/example_specs/models/hpa-densenet/densenet_model.onnx b/example_descriptions/models/hpa-densenet/densenet_model.onnx similarity index 100% rename from example_specs/models/hpa-densenet/densenet_model.onnx rename to example_descriptions/models/hpa-densenet/densenet_model.onnx diff --git a/example_specs/models/hpa-densenet/hpa_labels.txt b/example_descriptions/models/hpa-densenet/hpa_labels.txt similarity index 100% rename from example_specs/models/hpa-densenet/hpa_labels.txt rename to example_descriptions/models/hpa-densenet/hpa_labels.txt diff --git a/example_specs/models/hpa-densenet/rdf.yaml b/example_descriptions/models/hpa-densenet/rdf.yaml similarity index 72% rename from example_specs/models/hpa-densenet/rdf.yaml rename to example_descriptions/models/hpa-densenet/rdf.yaml index a43c9c15e..bb23b6676 100644 --- a/example_specs/models/hpa-densenet/rdf.yaml +++ b/example_descriptions/models/hpa-densenet/rdf.yaml @@ -1,22 +1,25 @@ attachments: files: - - ./hpa_labels.txt + - hpa_labels.txt authors: - name: Shubin Dai +maintainers: + - github_user: oeway + name: Wei Ouyang cite: - - doi: https://doi.org/10.1038/s41592-019-0658-6 + - doi: 10.1038/s41592-019-0658-6 text: >- Ouyang, W., Winsnes, C.F., Hjelmare, M. et al. Analysis of the Human Protein Atlas Image Classification competition. Nat Methods 16, 1254–1261 (2019).
covers: - - ./bestfitting-densenet-diagram.png + - bestfitting-densenet-diagram.png description: The winning model of HPA image classification 2019 by Bestfitting -documentation: ./README.md +documentation: README.md format_version: 0.4.0 inputs: - axes: bcyx - data_range: [-inf, inf] + data_range: [-.inf, .inf] data_type: float32 description: HPA image (jpeg or png) name: image @@ -24,20 +27,20 @@ inputs: - kwargs: axes: xy gain: [0.003921568627, 0.003921568627, 0.003921568627, 0.003921568627] - offset: [0, 0, 0, 0] + offset: [0.0, 0.0, 0.0, 0.0] name: scale_linear - shape: [1,4 ,1024 ,1024] + shape: [1, 4, 1024, 1024] license: MIT name: HPA Bestfitting Densenet outputs: - axes: bc - data_range: [0, inf] + data_range: [0, .inf] data_type: float32 description: protein localization pattern classes name: classes shape: [1, 28] - axes: bc - data_range: [-inf, inf] + data_range: [-.inf, .inf] data_type: float32 description: features of last fully connected layer name: features @@ -50,11 +53,11 @@ tags: - cells - protein-localization test_inputs: - - ./test_input.npy + - test_input.npy test_outputs: - - ./test_output_classes.npy - - ./test_output_features.npy -timestamp: '2021-09-21T12:00:12.000Z' + - test_output_classes.npy + - test_output_features.npy +timestamp: "2021-09-21T12:00:12.000Z" type: model version: 0.1.1 weights: @@ -62,6 +65,6 @@ weights: authors: - name: Wei OUYANG sha256: 0ef125632b0058b2f04448de56dc23339fcbfe4f628601e836e17cef6f344e3f - source: ./densenet_model.onnx + source: densenet_model.onnx links: - imjoy/BioImageIO-Packager diff --git a/example_specs/models/hpa-densenet/test_input.npy b/example_descriptions/models/hpa-densenet/test_input.npy similarity index 100% rename from example_specs/models/hpa-densenet/test_input.npy rename to example_descriptions/models/hpa-densenet/test_input.npy diff --git a/example_specs/models/hpa-densenet/test_output_classes.npy b/example_descriptions/models/hpa-densenet/test_output_classes.npy similarity index 100% rename from example_specs/models/hpa-densenet/test_output_classes.npy rename to example_descriptions/models/hpa-densenet/test_output_classes.npy diff --git a/example_specs/models/hpa-densenet/test_output_features.npy b/example_descriptions/models/hpa-densenet/test_output_features.npy similarity index 100% rename from example_specs/models/hpa-densenet/test_output_features.npy rename to example_descriptions/models/hpa-densenet/test_output_features.npy diff --git a/example_specs/models/stardist_example_model/README.md b/example_descriptions/models/stardist_example_model/README.md similarity index 100% rename from example_specs/models/stardist_example_model/README.md rename to example_descriptions/models/stardist_example_model/README.md diff --git a/example_specs/models/stardist_example_model/TF_SavedModel.zip b/example_descriptions/models/stardist_example_model/TF_SavedModel.zip similarity index 100% rename from example_specs/models/stardist_example_model/TF_SavedModel.zip rename to example_descriptions/models/stardist_example_model/TF_SavedModel.zip diff --git a/example_descriptions/models/stardist_example_model/rdf_v0_4.yaml b/example_descriptions/models/stardist_example_model/rdf_v0_4.yaml new file mode 100644 index 000000000..f3a28bd5e --- /dev/null +++ b/example_descriptions/models/stardist_example_model/rdf_v0_4.yaml @@ -0,0 +1,60 @@ +type: model +format_version: 0.4.0 +authors: + - name: Uwe Schmidt + - name: Martin Weigert + github_user: maweigert +cite: + - text: Cell Detection with Star-Convex Polygons + doi:
"10.1007/978-3-030-00934-2_30" + - text: Star-convex Polyhedra for 3D Object Detection and Segmentation in Microscopy + url: "10.1109/WACV45572.2020.9093435" +name: StardistExampleModel +description: StarDist +documentation: README.md +covers: [stardist_logo.jpg] +dependencies: pip:requirements.txt +git_repo: https://github.com/stardist/stardist +inputs: + - axes: byxc + data_range: [-.inf, .inf] + data_type: "float32" + name: input + preprocessing: + - kwargs: + { + axes: yx, + max_percentile: 99.8, + min_percentile: 1.0, + mode: per_sample, + } + name: scale_range + shape: + min: [1, 80, 80, 1] + step: [0, 16, 16, 0] +license: CC-BY-NC-4.0 +outputs: + - axes: byxc + data_range: [-.inf, .inf] + data_type: float32 + halo: [0, 32, 32, 0] + name: output + shape: + offset: [0.0, 0.0, 0.0, 16.5] + reference_tensor: input + scale: [1.0, 1.0, 1.0, 0.0] +tags: + [stardist, segmentation, instance segmentation, object detection, tensorflow] +test_inputs: [test_input.npy] +test_outputs: [test_output0.npy] +timestamp: "2021-11-22T13:14:30.643565Z" +weights: + tensorflow_saved_model_bundle: + sha256: 444cbbbaa2267c999ae321cf66d77025284079684b6c203aa8b846b3c7c70376 + source: TF_SavedModel.zip + tensorflow_version: "1.15" + +config: + stardist: + stardist_version: 0.7.3 + thresholds: { nms: 0.3, prob: 0.479071463157368 } diff --git a/example_descriptions/models/stardist_example_model/rdf_wrong_shape2_v0_4.yaml b/example_descriptions/models/stardist_example_model/rdf_wrong_shape2_v0_4.yaml new file mode 100644 index 000000000..0315b3c9a --- /dev/null +++ b/example_descriptions/models/stardist_example_model/rdf_wrong_shape2_v0_4.yaml @@ -0,0 +1,57 @@ +authors: + - name: Uwe Schmidt + - name: Martin Weigert + github_user: maweigert +cite: + - text: Cell Detection with Star-Convex Polygons + doi: "10.1007/978-3-030-00934-2_30" + - text: Star-convex Polyhedra for 3D Object Detection and Segmentation in Microscopy + doi: "10.1109/WACV45572.2020.9093435" +config: + stardist: + stardist_version: 0.7.3 + thresholds: { nms: 0.3, prob: 0.479071463157368 } +covers: [stardist_logo.jpg] +dependencies: pip:requirements.txt +description: StarDist +documentation: README.md +format_version: 0.4.0 +git_repo: https://github.com/stardist/stardist +inputs: + - axes: byxc + data_range: [-.inf, .inf] + data_type: uint16 + name: input + preprocessing: + - name: scale_range + kwargs: + axes: yx + max_percentile: 99.8 + min_percentile: 1.0 + mode: per_sample + shape: + min: [1, 80, 80, 1] + step: [0, 17, 17, 0] +license: CC-BY-NC-4.0 +name: StardistExampleModel +outputs: + - axes: byxc + data_range: [-.inf, .inf] + data_type: float32 + halo: [0, 32, 32, 0] + name: output + shape: + offset: [0.0, 0.0, 0.0, 16.5] + reference_tensor: input + scale: [1.0, 1.0, 1.0, 0.0] +tags: + [stardist, segmentation, instance segmentation, object detection, tensorflow] +test_inputs: [test_input.npy] +test_outputs: [test_output0.npy] +timestamp: "2021-11-22T13:14:30.643565" +type: model +weights: + tensorflow_saved_model_bundle: + sha256: 444cbbbaa2267c999ae321cf66d77025284079684b6c203aa8b846b3c7c70376 + source: TF_SavedModel.zip + tensorflow_version: "1.15" diff --git a/example_descriptions/models/stardist_example_model/rdf_wrong_shape_v0_4.yaml b/example_descriptions/models/stardist_example_model/rdf_wrong_shape_v0_4.yaml new file mode 100644 index 000000000..f4499d6ec --- /dev/null +++ b/example_descriptions/models/stardist_example_model/rdf_wrong_shape_v0_4.yaml @@ -0,0 +1,56 @@ +authors: + - name: Uwe Schmidt + - name: Martin Weigert + 
github_user: maweigert +cite: + - text: Cell Detection with Star-Convex Polygons, + doi: "10.1007/978-3-030-00934-2_30" + - text: Star-convex Polyhedra for 3D Object Detection and Segmentation in Microscopy, + doi: "10.1109/WACV45572.2020.9093435" +config: + stardist: + stardist_version: 0.7.3 + thresholds: { nms: 0.3, prob: 0.479071463157368 } +covers: [stardist_logo.jpg] +dependencies: pip:requirements.txt +description: This spec contains an incorrect output shape description and is included for test purposes. +documentation: README.md +format_version: 0.4.0 +git_repo: https://github.com/stardist/stardist +inputs: + - axes: byxc + data_range: [-.inf, .inf] + data_type: uint16 + name: input + preprocessing: + - name: scale_range + kwargs: + axes: yx + max_percentile: 99.8 + min_percentile: 1.0 + mode: per_sample + shape: + min: [1, 80, 80, 1] + step: [0, 16, 16, 0] +license: CC-BY-NC-4.0 +name: StardistExampleModel +outputs: + - axes: byxc + data_range: [-.inf, .inf] + data_type: float32 + name: output + shape: + offset: [1, 1, 1, 33] # should be [0.0, 0.0, 0.0, 16.5] + reference_tensor: input + scale: [1.0, 1.0, 1.0, 0.0] +tags: + [stardist, segmentation, instance segmentation, object detection, tensorflow] +test_inputs: [test_input.npy] +test_outputs: [test_output0.npy] +timestamp: "2021-11-22T13:14:30.643565" +type: model +weights: + tensorflow_saved_model_bundle: + sha256: 444cbbbaa2267c999ae321cf66d77025284079684b6c203aa8b846b3c7c70376 + source: TF_SavedModel.zip + tensorflow_version: "1.15" diff --git a/example_specs/models/stardist_example_model/requirements.txt b/example_descriptions/models/stardist_example_model/requirements.txt similarity index 100% rename from example_specs/models/stardist_example_model/requirements.txt rename to example_descriptions/models/stardist_example_model/requirements.txt diff --git a/example_specs/models/stardist_example_model/stardist_logo.jpg b/example_descriptions/models/stardist_example_model/stardist_logo.jpg similarity index 100% rename from example_specs/models/stardist_example_model/stardist_logo.jpg rename to example_descriptions/models/stardist_example_model/stardist_logo.jpg diff --git a/example_specs/models/stardist_example_model/test_input.npy b/example_descriptions/models/stardist_example_model/test_input.npy similarity index 100% rename from example_specs/models/stardist_example_model/test_input.npy rename to example_descriptions/models/stardist_example_model/test_input.npy diff --git a/example_specs/models/stardist_example_model/test_output0.npy b/example_descriptions/models/stardist_example_model/test_output0.npy similarity index 100% rename from example_specs/models/stardist_example_model/test_output0.npy rename to example_descriptions/models/stardist_example_model/test_output0.npy diff --git a/example_specs/models/unet2d_diff_output_shape/cover.jpg b/example_descriptions/models/unet2d_diff_output_shape/cover.jpg similarity index 100% rename from example_specs/models/unet2d_diff_output_shape/cover.jpg rename to example_descriptions/models/unet2d_diff_output_shape/cover.jpg diff --git a/example_specs/models/unet2d_diff_output_shape/documentation.md b/example_descriptions/models/unet2d_diff_output_shape/documentation.md similarity index 100% rename from example_specs/models/unet2d_diff_output_shape/documentation.md rename to example_descriptions/models/unet2d_diff_output_shape/documentation.md diff --git a/example_specs/models/unet2d_diff_output_shape/environment.yaml 
b/example_descriptions/models/unet2d_diff_output_shape/environment.yaml similarity index 100% rename from example_specs/models/unet2d_diff_output_shape/environment.yaml rename to example_descriptions/models/unet2d_diff_output_shape/environment.yaml diff --git a/example_descriptions/models/unet2d_diff_output_shape/rdf_v0_4.yaml b/example_descriptions/models/unet2d_diff_output_shape/rdf_v0_4.yaml new file mode 100644 index 000000000..3c21fa122 --- /dev/null +++ b/example_descriptions/models/unet2d_diff_output_shape/rdf_v0_4.yaml @@ -0,0 +1,46 @@ +authors: + - { name: Constantin Pape, github_user: constantinpape } +cite: + - { text: training library, doi: "10.5281/zenodo.5108853" } + - { text: architecture, doi: "10.1007/978-3-319-24574-4_28" } +covers: [cover.jpg] +dependencies: conda:environment.yaml +description: Different output shape +documentation: documentation.md +format_version: 0.4.0 +git_repo: https://github.com/constantinpape/torch-em.git +inputs: + - axes: bcyx + data_range: [-.inf, .inf] + data_type: float32 + name: input + preprocessing: + - kwargs: { axes: cyx, mode: per_sample } + name: zero_mean_unit_variance + shape: + min: [1, 1, 32, 32] + step: [0, 0, 16, 16] +license: CC-BY-4.0 +links: [ilastik/ilastik] +name: Diff-output-shape +outputs: + - axes: bcyx + data_range: [-.inf, .inf] + data_type: float32 + name: output + shape: + offset: [0, 0, 0, 0] + reference_tensor: input + scale: [1, 1, 0.5, 0.5] +tags: [segmentation] +test_inputs: [test_input.npy] +test_outputs: [test_output.npy] +timestamp: "2021-09-12T14:18:58.389834Z" +type: model +weights: + pytorch_state_dict: + architecture: resize_unet.py:ResizeUNet + architecture_sha256: bc9e7fe5dd5d3a6c7a4ef7d32b6704169f887b7632f898fc07c762eea7a3feb5 + kwargs: { depth: 3, in_channels: 1, initial_features: 16, out_channels: 1 } + sha256: 2c475874f358eb75dc5f3b6af8d186e4fbf8da3acf43fb6662f0d5de21b0a838 + source: weights.pt diff --git a/example_specs/models/unet2d_diff_output_shape/resize_unet.py b/example_descriptions/models/unet2d_diff_output_shape/resize_unet.py similarity index 74% rename from example_specs/models/unet2d_diff_output_shape/resize_unet.py rename to example_descriptions/models/unet2d_diff_output_shape/resize_unet.py index 228201ec6..521b039eb 100644 --- a/example_specs/models/unet2d_diff_output_shape/resize_unet.py +++ b/example_descriptions/models/unet2d_diff_output_shape/resize_unet.py @@ -1,3 +1,4 @@ +# type: ignore from typing import Optional import torch @@ -10,7 +11,10 @@ class UNetBase(nn.Module): def __init__(self, encoder, base, decoder, out_conv=None, final_activation=None): super().__init__() if len(encoder) != len(decoder): - raise ValueError(f"Incompatible depth of encoder (depth={len(encoder)}) and decoder (depth={len(decoder)})") + raise ValueError( + f"Incompatible depth of encoder (depth={len(encoder)}) and decoder" + f" (depth={len(decoder)})" + ) self.encoder = encoder self.base = base @@ -82,7 +86,9 @@ def forward(self, x): def _update_conv_kwargs(kwargs, scale_factor): # if the scale factor is a scalar or all entries are the same we don't need to update the kwargs - if isinstance(scale_factor, int) or scale_factor.count(scale_factor[0]) == len(scale_factor): + if isinstance(scale_factor, int) or scale_factor.count(scale_factor[0]) == len( + scale_factor + ): return kwargs else: # otherwise set anisotropic kernel kernel_size = kwargs.get("kernel_size", 3) @@ -93,7 +99,9 @@ def _update_conv_kwargs(kwargs, scale_factor): if not (isinstance(kernel_size, int) and isinstance(padding, int)): return 
kwargs - kernel_size = tuple(1 if factor == 1 else kernel_size for factor in scale_factor) + kernel_size = tuple( + 1 if factor == 1 else kernel_size for factor in scale_factor + ) padding = tuple(0 if factor == 1 else padding for factor in scale_factor) kwargs.update({"kernel_size": kernel_size, "padding": padding}) return kwargs @@ -101,16 +109,26 @@ def _update_conv_kwargs(kwargs, scale_factor): class Encoder(nn.Module): def __init__( - self, features, scale_factors, conv_block_impl, pooler_impl, anisotropic_kernel=False, **conv_block_kwargs + self, + features, + scale_factors, + conv_block_impl, + pooler_impl, + anisotropic_kernel=False, + **conv_block_kwargs, ): super().__init__() if len(features) != len(scale_factors) + 1: - raise ValueError("Incompatible number of features {len(features)} and scale_factors {len(scale_factors)}") + raise ValueError( + "Incompatible number of features {len(features)} and scale_factors" + " {len(scale_factors)}" + ) conv_kwargs = [conv_block_kwargs] * len(scale_factors) if anisotropic_kernel: conv_kwargs = [ - _update_conv_kwargs(kwargs, scale_factor) for kwargs, scale_factor in zip(conv_kwargs, scale_factors) + _update_conv_kwargs(kwargs, scale_factor) + for kwargs, scale_factor in zip(conv_kwargs, scale_factors) ] self.blocks = nn.ModuleList( @@ -143,16 +161,26 @@ def forward(self, x): class Decoder(nn.Module): def __init__( - self, features, scale_factors, conv_block_impl, sampler_impl, anisotropic_kernel=False, **conv_block_kwargs + self, + features, + scale_factors, + conv_block_impl, + sampler_impl, + anisotropic_kernel=False, + **conv_block_kwargs, ): super().__init__() if len(features) != len(scale_factors) + 1: - raise ValueError("Incompatible number of features {len(features)} and scale_factors {len(scale_factors)}") + raise ValueError( + "Incompatible number of features {len(features)} and scale_factors" + " {len(scale_factors)}" + ) conv_kwargs = [conv_block_kwargs] * len(scale_factors) if anisotropic_kernel: conv_kwargs = [ - _update_conv_kwargs(kwargs, scale_factor) for kwargs, scale_factor in zip(conv_kwargs, scale_factors) + _update_conv_kwargs(kwargs, scale_factor) + for kwargs, scale_factor in zip(conv_kwargs, scale_factors) ] self.blocks = nn.ModuleList( @@ -162,7 +190,10 @@ def __init__( ] ) self.samplers = nn.ModuleList( - [sampler_impl(factor, inc, outc) for factor, inc, outc in zip(scale_factors, features[:-1], features[1:])] + [ + sampler_impl(factor, inc, outc) + for factor, inc, outc in zip(scale_factors, features[:-1], features[1:]) + ] ) self.return_outputs = False @@ -188,10 +219,15 @@ def _concat(self, x1, x2): def forward(self, x, encoder_inputs): if len(encoder_inputs) != len(self.blocks): - raise ValueError(f"Invalid number of encoder_inputs: expect {len(self.blocks)}, got {len(encoder_inputs)}") + raise ValueError( + f"Invalid number of encoder_inputs: expect {len(self.blocks)}, got" + f" {len(encoder_inputs)}" + ) decoder_out = [] - for block, sampler, from_encoder in zip(self.blocks, self.samplers, encoder_inputs): + for block, sampler, from_encoder in zip( + self.blocks, self.samplers, encoder_inputs + ): x = sampler(x) x = block(self._concat(x, from_encoder)) decoder_out.append(x) @@ -212,11 +248,22 @@ def get_norm_layer(norm, dim, channels, n_groups=32): elif norm == "BatchNorm": return nn.BatchNorm2d(channels) if dim == 2 else nn.BatchNorm3d(channels) else: - raise ValueError(f"Invalid norm: expect one of 'InstanceNorm', 'BatchNorm' or 'GroupNorm', got {norm}") + raise ValueError( + "Invalid norm: expect one of 
'InstanceNorm', 'BatchNorm' or 'GroupNorm'," + f" got {norm}" + ) class ConvBlock(nn.Module): - def __init__(self, in_channels, out_channels, dim, kernel_size=3, padding=1, norm="InstanceNorm"): + def __init__( + self, + in_channels, + out_channels, + dim, + kernel_size=3, + padding=1, + norm="InstanceNorm", + ): super().__init__() self.in_channels = in_channels self.out_channels = out_channels @@ -225,18 +272,26 @@ def __init__(self, in_channels, out_channels, dim, kernel_size=3, padding=1, nor if norm is None: self.block = nn.Sequential( - conv(in_channels, out_channels, kernel_size=kernel_size, padding=padding), + conv( + in_channels, out_channels, kernel_size=kernel_size, padding=padding + ), nn.ReLU(inplace=True), - conv(out_channels, out_channels, kernel_size=kernel_size, padding=padding), + conv( + out_channels, out_channels, kernel_size=kernel_size, padding=padding + ), nn.ReLU(inplace=True), ) else: self.block = nn.Sequential( get_norm_layer(norm, dim, in_channels), - conv(in_channels, out_channels, kernel_size=kernel_size, padding=padding), + conv( + in_channels, out_channels, kernel_size=kernel_size, padding=padding + ), nn.ReLU(inplace=True), get_norm_layer(norm, dim, out_channels), - conv(out_channels, out_channels, kernel_size=kernel_size, padding=padding), + conv( + out_channels, out_channels, kernel_size=kernel_size, padding=padding + ), nn.ReLU(inplace=True), ) @@ -254,7 +309,9 @@ def __init__(self, scale_factor, in_channels, out_channels, dim, mode): self.conv = conv(in_channels, out_channels, 1) def forward(self, x): - x = nn.functional.interpolate(x, scale_factor=self.scale_factor, mode=self.mode, align_corners=False) + x = nn.functional.interpolate( + x, scale_factor=self.scale_factor, mode=self.mode, align_corners=False + ) x = self.conv(x) return x @@ -289,7 +346,9 @@ def __init__( sampler_impl=Upsampler2d, **conv_block_kwargs, ): - features_encoder = [in_channels] + [initial_features * gain**i for i in range(depth)] + features_encoder = [in_channels] + [ + initial_features * gain**i for i in range(depth) + ] features_decoder = [initial_features * gain**i for i in range(depth + 1)][::-1] scale_factors = depth * [2] @@ -299,10 +358,17 @@ def __init__( if len(out_channels) != depth: raise ValueError() out_conv: Optional[nn.Module] = nn.ModuleList( - [nn.Conv2d(feat, outc, 1) for feat, outc in zip(features_decoder[1:], out_channels)] + [ + nn.Conv2d(feat, outc, 1) + for feat, outc in zip(features_decoder[1:], out_channels) + ] ) else: - out_conv = None if out_channels is None else nn.Conv2d(features_decoder[-1], out_channels, 1) + out_conv = ( + None + if out_channels is None + else nn.Conv2d(features_decoder[-1], out_channels, 1) + ) super().__init__( encoder=Encoder( @@ -319,7 +385,9 @@ def __init__( sampler_impl=sampler_impl, **conv_block_kwargs, ), - base=conv_block_impl(features_encoder[-1], features_encoder[-1] * gain, **conv_block_kwargs), + base=conv_block_impl( + features_encoder[-1], features_encoder[-1] * gain, **conv_block_kwargs + ), out_conv=out_conv, final_activation=final_activation, ) diff --git a/example_specs/models/unet2d_diff_output_shape/test_input.npy b/example_descriptions/models/unet2d_diff_output_shape/test_input.npy similarity index 100% rename from example_specs/models/unet2d_diff_output_shape/test_input.npy rename to example_descriptions/models/unet2d_diff_output_shape/test_input.npy diff --git a/example_specs/models/unet2d_diff_output_shape/test_output.npy b/example_descriptions/models/unet2d_diff_output_shape/test_output.npy similarity 
index 100% rename from example_specs/models/unet2d_diff_output_shape/test_output.npy rename to example_descriptions/models/unet2d_diff_output_shape/test_output.npy diff --git a/example_specs/models/unet2d_diff_output_shape/weights.pt b/example_descriptions/models/unet2d_diff_output_shape/weights.pt similarity index 100% rename from example_specs/models/unet2d_diff_output_shape/weights.pt rename to example_descriptions/models/unet2d_diff_output_shape/weights.pt diff --git a/example_specs/models/unet2d_fixed_shape/cover.jpg b/example_descriptions/models/unet2d_fixed_shape/cover.jpg similarity index 100% rename from example_specs/models/unet2d_fixed_shape/cover.jpg rename to example_descriptions/models/unet2d_fixed_shape/cover.jpg diff --git a/example_specs/models/unet2d_fixed_shape/documentation.md b/example_descriptions/models/unet2d_fixed_shape/documentation.md similarity index 100% rename from example_specs/models/unet2d_fixed_shape/documentation.md rename to example_descriptions/models/unet2d_fixed_shape/documentation.md diff --git a/example_specs/models/unet2d_fixed_shape/environment.yaml b/example_descriptions/models/unet2d_fixed_shape/environment.yaml similarity index 100% rename from example_specs/models/unet2d_fixed_shape/environment.yaml rename to example_descriptions/models/unet2d_fixed_shape/environment.yaml diff --git a/example_descriptions/models/unet2d_fixed_shape/rdf_v0_4.yaml b/example_descriptions/models/unet2d_fixed_shape/rdf_v0_4.yaml new file mode 100644 index 000000000..31b48e619 --- /dev/null +++ b/example_descriptions/models/unet2d_fixed_shape/rdf_v0_4.yaml @@ -0,0 +1,57 @@ +authors: + - { name: Constantin Pape, github_user: constantinpape } +maintainers: + - { name: Constantin Pape, github_user: constantinpape } + - { name: Fynn Beuttenmueller, github_user: fynnbe } +cite: + - text: training library + doi: "10.5281/zenodo.5108853" + - text: architecture + doi: "10.1007/978-3-319-24574-4_28" +covers: ["cover.jpg"] +description: Fixed shape +documentation: documentation.md +format_version: 0.4.9 +git_repo: https://github.com/constantinpape/torch-em.git +inputs: + - axes: &axes bcyx + data_range: [-.inf, .inf] + data_type: float32 + name: input + preprocessing: + - kwargs: { axes: cyx, mode: per_sample } + name: zero_mean_unit_variance + shape: &shape [1, 1, 256, 256] +license: CC-BY-4.0 +links: [ilastik/ilastik] +name: Fixed-shape +outputs: + - axes: *axes + data_range: [-.inf, .inf] + data_type: float32 + halo: [0, 0, 8, 8] + name: output + shape: *shape +tags: [segmentation] +test_inputs: ["test_input.npy"] +test_outputs: ["test_output.npy"] +timestamp: "2021-09-12T14:04:52.211533Z" +type: model +weights: + pytorch_state_dict: + architecture: unet.py:UNet2d + architecture_sha256: 7f5b15948e8e2c91f78dcff34fbf30af517073e91ba487f3edb982b948d099b3 + dependencies: conda:environment.yaml + kwargs: + { + depth: 3, + final_activation: null, + gain: 2, + in_channels: 1, + initial_features: 16, + out_channels: 1, + postprocessing: null, + return_side_outputs: false, + } + sha256: bcf26f08c26f5ef8f891c08dba2c8812bec7991643fb7c0f7b5d3e7dc321092f + source: weights.pt diff --git a/example_specs/models/unet2d_fixed_shape/test_input.npy b/example_descriptions/models/unet2d_fixed_shape/test_input.npy similarity index 100% rename from example_specs/models/unet2d_fixed_shape/test_input.npy rename to example_descriptions/models/unet2d_fixed_shape/test_input.npy diff --git a/example_specs/models/unet2d_fixed_shape/test_output.npy 
b/example_descriptions/models/unet2d_fixed_shape/test_output.npy similarity index 100% rename from example_specs/models/unet2d_fixed_shape/test_output.npy rename to example_descriptions/models/unet2d_fixed_shape/test_output.npy diff --git a/example_specs/models/unet2d_fixed_shape/unet.py b/example_descriptions/models/unet2d_fixed_shape/unet.py similarity index 78% rename from example_specs/models/unet2d_fixed_shape/unet.py rename to example_descriptions/models/unet2d_fixed_shape/unet.py index 4942b3403..3533ffa75 100644 --- a/example_specs/models/unet2d_fixed_shape/unet.py +++ b/example_descriptions/models/unet2d_fixed_shape/unet.py @@ -1,9 +1,9 @@ +# type: ignore from typing import Optional import torch import torch.nn as nn - # # Model Internal Post-processing # @@ -42,7 +42,9 @@ def affinities_to_boundaries(aff_channels, accumulator="max"): return AccumulateChannels(None, aff_channels, accumulator) -def affinities_with_foreground_to_boundaries(aff_channels, fg_channel=(0, 1), accumulator="max"): +def affinities_with_foreground_to_boundaries( + aff_channels, fg_channel=(0, 1), accumulator="max" +): return AccumulateChannels(fg_channel, aff_channels, accumulator) @@ -69,9 +71,13 @@ def affinities_to_boundaries_anisotropic(): POSTPROCESSING = { "affinities_to_boundaries_anisotropic": affinities_to_boundaries_anisotropic, "affinities_to_boundaries2d": affinities_to_boundaries2d, - "affinities_with_foreground_to_boundaries2d": affinities_with_foreground_to_boundaries2d, + "affinities_with_foreground_to_boundaries2d": ( + affinities_with_foreground_to_boundaries2d + ), "affinities_to_boundaries3d": affinities_to_boundaries3d, - "affinities_with_foreground_to_boundaries3d": affinities_with_foreground_to_boundaries3d, + "affinities_with_foreground_to_boundaries3d": ( + affinities_with_foreground_to_boundaries3d + ), } @@ -83,10 +89,21 @@ def affinities_to_boundaries_anisotropic(): class UNetBase(nn.Module): """ """ - def __init__(self, encoder, base, decoder, out_conv=None, final_activation=None, postprocessing=None): + def __init__( + self, + encoder, + base, + decoder, + out_conv=None, + final_activation=None, + postprocessing=None, + ): super().__init__() if len(encoder) != len(decoder): - raise ValueError(f"Incompatible depth of encoder (depth={len(encoder)}) and decoder (depth={len(decoder)})") + raise ValueError( + f"Incompatible depth of encoder (depth={len(encoder)}) and decoder" + f" (depth={len(decoder)})" + ) self.encoder = encoder self.base = base @@ -97,9 +114,14 @@ def __init__(self, encoder, base, decoder, out_conv=None, final_activation=None, self._out_channels = self.decoder.out_channels elif isinstance(out_conv, nn.ModuleList): if len(out_conv) != len(self.decoder): - raise ValueError(f"Invalid length of out_conv, expected {len(decoder)}, got {len(out_conv)}") + raise ValueError( + f"Invalid length of out_conv, expected {len(decoder)}, got" + f" {len(out_conv)}" + ) self.return_decoder_outputs = True - self._out_channels = [None if conv is None else conv.out_channels for conv in out_conv] + self._out_channels = [ + None if conv is None else conv.out_channels for conv in out_conv + ] else: self.return_decoder_outputs = False self._out_channels = out_conv.out_channels @@ -198,7 +220,9 @@ def forward(self, x): def _update_conv_kwargs(kwargs, scale_factor): # if the scale factor is a scalar or all entries are the same we don't need to update the kwargs - if isinstance(scale_factor, int) or scale_factor.count(scale_factor[0]) == len(scale_factor): + if isinstance(scale_factor, int) 
or scale_factor.count(scale_factor[0]) == len( + scale_factor + ): return kwargs else: # otherwise set anisotropic kernel kernel_size = kwargs.get("kernel_size", 3) @@ -209,7 +233,9 @@ def _update_conv_kwargs(kwargs, scale_factor): if not (isinstance(kernel_size, int) and isinstance(padding, int)): return kwargs - kernel_size = tuple(1 if factor == 1 else kernel_size for factor in scale_factor) + kernel_size = tuple( + 1 if factor == 1 else kernel_size for factor in scale_factor + ) padding = tuple(0 if factor == 1 else padding for factor in scale_factor) kwargs.update({"kernel_size": kernel_size, "padding": padding}) return kwargs @@ -217,16 +243,26 @@ def _update_conv_kwargs(kwargs, scale_factor): class Encoder(nn.Module): def __init__( - self, features, scale_factors, conv_block_impl, pooler_impl, anisotropic_kernel=False, **conv_block_kwargs + self, + features, + scale_factors, + conv_block_impl, + pooler_impl, + anisotropic_kernel=False, + **conv_block_kwargs, ): super().__init__() if len(features) != len(scale_factors) + 1: - raise ValueError("Incompatible number of features {len(features)} and scale_factors {len(scale_factors)}") + raise ValueError( + "Incompatible number of features {len(features)} and scale_factors" + " {len(scale_factors)}" + ) conv_kwargs = [conv_block_kwargs] * len(scale_factors) if anisotropic_kernel: conv_kwargs = [ - _update_conv_kwargs(kwargs, scale_factor) for kwargs, scale_factor in zip(conv_kwargs, scale_factors) + _update_conv_kwargs(kwargs, scale_factor) + for kwargs, scale_factor in zip(conv_kwargs, scale_factors) ] self.blocks = nn.ModuleList( @@ -259,16 +295,26 @@ def forward(self, x): class Decoder(nn.Module): def __init__( - self, features, scale_factors, conv_block_impl, sampler_impl, anisotropic_kernel=False, **conv_block_kwargs + self, + features, + scale_factors, + conv_block_impl, + sampler_impl, + anisotropic_kernel=False, + **conv_block_kwargs, ): super().__init__() if len(features) != len(scale_factors) + 1: - raise ValueError("Incompatible number of features {len(features)} and scale_factors {len(scale_factors)}") + raise ValueError( + "Incompatible number of features {len(features)} and scale_factors" + " {len(scale_factors)}" + ) conv_kwargs = [conv_block_kwargs] * len(scale_factors) if anisotropic_kernel: conv_kwargs = [ - _update_conv_kwargs(kwargs, scale_factor) for kwargs, scale_factor in zip(conv_kwargs, scale_factors) + _update_conv_kwargs(kwargs, scale_factor) + for kwargs, scale_factor in zip(conv_kwargs, scale_factors) ] self.blocks = nn.ModuleList( @@ -278,7 +324,10 @@ def __init__( ] ) self.samplers = nn.ModuleList( - [sampler_impl(factor, inc, outc) for factor, inc, outc in zip(scale_factors, features[:-1], features[1:])] + [ + sampler_impl(factor, inc, outc) + for factor, inc, outc in zip(scale_factors, features[:-1], features[1:]) + ] ) self.return_outputs = False @@ -304,10 +353,15 @@ def _concat(self, x1, x2): def forward(self, x, encoder_inputs): if len(encoder_inputs) != len(self.blocks): - raise ValueError(f"Invalid number of encoder_inputs: expect {len(self.blocks)}, got {len(encoder_inputs)}") + raise ValueError( + f"Invalid number of encoder_inputs: expect {len(self.blocks)}, got" + f" {len(encoder_inputs)}" + ) decoder_out = [] - for block, sampler, from_encoder in zip(self.blocks, self.samplers, encoder_inputs): + for block, sampler, from_encoder in zip( + self.blocks, self.samplers, encoder_inputs + ): x = sampler(x) x = block(self._concat(x, from_encoder)) decoder_out.append(x) @@ -328,11 +382,22 @@ def 
get_norm_layer(norm, dim, channels, n_groups=32): elif norm == "BatchNorm": return nn.BatchNorm2d(channels) if dim == 2 else nn.BatchNorm3d(channels) else: - raise ValueError(f"Invalid norm: expect one of 'InstanceNorm', 'BatchNorm' or 'GroupNorm', got {norm}") + raise ValueError( + "Invalid norm: expect one of 'InstanceNorm', 'BatchNorm' or 'GroupNorm'," + f" got {norm}" + ) class ConvBlock(nn.Module): - def __init__(self, in_channels, out_channels, dim, kernel_size=3, padding=1, norm="InstanceNorm"): + def __init__( + self, + in_channels, + out_channels, + dim, + kernel_size=3, + padding=1, + norm="InstanceNorm", + ): super().__init__() self.in_channels = in_channels self.out_channels = out_channels @@ -341,18 +406,26 @@ def __init__(self, in_channels, out_channels, dim, kernel_size=3, padding=1, nor if norm is None: self.block = nn.Sequential( - conv(in_channels, out_channels, kernel_size=kernel_size, padding=padding), + conv( + in_channels, out_channels, kernel_size=kernel_size, padding=padding + ), nn.ReLU(inplace=True), - conv(out_channels, out_channels, kernel_size=kernel_size, padding=padding), + conv( + out_channels, out_channels, kernel_size=kernel_size, padding=padding + ), nn.ReLU(inplace=True), ) else: self.block = nn.Sequential( get_norm_layer(norm, dim, in_channels), - conv(in_channels, out_channels, kernel_size=kernel_size, padding=padding), + conv( + in_channels, out_channels, kernel_size=kernel_size, padding=padding + ), nn.ReLU(inplace=True), get_norm_layer(norm, dim, out_channels), - conv(out_channels, out_channels, kernel_size=kernel_size, padding=padding), + conv( + out_channels, out_channels, kernel_size=kernel_size, padding=padding + ), nn.ReLU(inplace=True), ) @@ -370,7 +443,9 @@ def __init__(self, scale_factor, in_channels, out_channels, dim, mode): self.conv = conv(in_channels, out_channels, 1) def forward(self, x): - x = nn.functional.interpolate(x, scale_factor=self.scale_factor, mode=self.mode, align_corners=False) + x = nn.functional.interpolate( + x, scale_factor=self.scale_factor, mode=self.mode, align_corners=False + ) x = self.conv(x) return x @@ -406,7 +481,9 @@ def __init__( postprocessing=None, **conv_block_kwargs, ): - features_encoder = [in_channels] + [initial_features * gain**i for i in range(depth)] + features_encoder = [in_channels] + [ + initial_features * gain**i for i in range(depth) + ] features_decoder = [initial_features * gain**i for i in range(depth + 1)][::-1] scale_factors = depth * [2] @@ -416,10 +493,17 @@ def __init__( if len(out_channels) != depth: raise ValueError() out_conv: Optional[nn.Module] = nn.ModuleList( - [nn.Conv2d(feat, outc, 1) for feat, outc in zip(features_decoder[1:], out_channels)] + [ + nn.Conv2d(feat, outc, 1) + for feat, outc in zip(features_decoder[1:], out_channels) + ] ) else: - out_conv = None if out_channels is None else nn.Conv2d(features_decoder[-1], out_channels, 1) + out_conv = ( + None + if out_channels is None + else nn.Conv2d(features_decoder[-1], out_channels, 1) + ) super().__init__( encoder=Encoder( @@ -436,7 +520,9 @@ def __init__( sampler_impl=sampler_impl, **conv_block_kwargs, ), - base=conv_block_impl(features_encoder[-1], features_encoder[-1] * gain, **conv_block_kwargs), + base=conv_block_impl( + features_encoder[-1], features_encoder[-1] * gain, **conv_block_kwargs + ), out_conv=out_conv, final_activation=final_activation, postprocessing=postprocessing, @@ -488,7 +574,9 @@ def __init__( **conv_block_kwargs, ): depth = len(scale_factors) - features_encoder = [in_channels] + [initial_features 
* gain**i for i in range(depth)] + features_encoder = [in_channels] + [ + initial_features * gain**i for i in range(depth) + ] features_decoder = [initial_features * gain**i for i in range(depth + 1)][::-1] if return_side_outputs: @@ -497,10 +585,17 @@ def __init__( if len(out_channels) != depth: raise ValueError() out_conv: Optional[nn.Module] = nn.ModuleList( - [nn.Conv3d(feat, outc, 1) for feat, outc in zip(features_decoder[1:], out_channels)] + [ + nn.Conv3d(feat, outc, 1) + for feat, outc in zip(features_decoder[1:], out_channels) + ] ) else: - out_conv = None if out_channels is None else nn.Conv3d(features_decoder[-1], out_channels, 1) + out_conv = ( + None + if out_channels is None + else nn.Conv3d(features_decoder[-1], out_channels, 1) + ) super().__init__( encoder=Encoder( @@ -519,7 +614,9 @@ def __init__( anisotropic_kernel=anisotropic_kernel, **conv_block_kwargs, ), - base=conv_block_impl(features_encoder[-1], features_encoder[-1] * gain, **conv_block_kwargs), + base=conv_block_impl( + features_encoder[-1], features_encoder[-1] * gain, **conv_block_kwargs + ), out_conv=out_conv, final_activation=final_activation, postprocessing=postprocessing, diff --git a/example_specs/models/unet2d_fixed_shape/weights.pt b/example_descriptions/models/unet2d_fixed_shape/weights.pt similarity index 100% rename from example_specs/models/unet2d_fixed_shape/weights.pt rename to example_descriptions/models/unet2d_fixed_shape/weights.pt diff --git a/example_specs/models/unet2d_keras_tf/README.md b/example_descriptions/models/unet2d_keras_tf/README.md similarity index 100% rename from example_specs/models/unet2d_keras_tf/README.md rename to example_descriptions/models/unet2d_keras_tf/README.md diff --git a/example_specs/models/unet2d_keras_tf/input.png b/example_descriptions/models/unet2d_keras_tf/input.png similarity index 100% rename from example_specs/models/unet2d_keras_tf/input.png rename to example_descriptions/models/unet2d_keras_tf/input.png diff --git a/example_specs/models/unet2d_keras_tf/keras_weights.hdf5 b/example_descriptions/models/unet2d_keras_tf/keras_weights.hdf5 similarity index 100% rename from example_specs/models/unet2d_keras_tf/keras_weights.hdf5 rename to example_descriptions/models/unet2d_keras_tf/keras_weights.hdf5 diff --git a/example_specs/models/unet2d_keras_tf/output.png b/example_descriptions/models/unet2d_keras_tf/output.png similarity index 100% rename from example_specs/models/unet2d_keras_tf/output.png rename to example_descriptions/models/unet2d_keras_tf/output.png diff --git a/example_specs/models/unet2d_keras_tf/per_sample_scale_range.ijm b/example_descriptions/models/unet2d_keras_tf/per_sample_scale_range.ijm similarity index 100% rename from example_specs/models/unet2d_keras_tf/per_sample_scale_range.ijm rename to example_descriptions/models/unet2d_keras_tf/per_sample_scale_range.ijm diff --git a/example_descriptions/models/unet2d_keras_tf/rdf_v0_4.yaml b/example_descriptions/models/unet2d_keras_tf/rdf_v0_4.yaml new file mode 100644 index 000000000..a47e3c956 --- /dev/null +++ b/example_descriptions/models/unet2d_keras_tf/rdf_v0_4.yaml @@ -0,0 +1,72 @@ +attachments: + files: [per_sample_scale_range.ijm] +authors: + - affiliation: zerocost + name: zerocost + github_user: HenriquesLab +cite: + - text: Falk et al. Nature Methods 2019 + doi: "10.1038/s41592-018-0261-2" + - text: Ronneberger et al. arXiv in 2015 + doi: "10.1007/978-3-319-24574-4_28" + - text: Lucas von Chamier et al. 
biorXiv 2020 + doi: "10.1101/2020.03.20.000133" +config: + deepimagej: + allow_tiling: true + model_keys: null + prediction: + postprocess: + - { spec: null } + preprocess: + - { kwargs: per_sample_scale_range.ijm, spec: "ij.IJ::runMacroFile" } + pyramidal_model: false + test_information: + inputs: + - name: sample_input_0.tif + pixel_size: { x: 1.0, y: 1.0, z: 1.0 } + size: 512 x 512 x 1 x 1 + memory_peak: null + outputs: + - { name: sample_output_0.tif, size: 512 x 512 x 1 x 1, type: image } + runtime: null +covers: [input.png, output.png] +description: 2D UNet trained on data from the ISBI 2D segmentation challenge. +documentation: README.md +format_version: 0.4.0 +inputs: + - axes: bxyc + data_range: [-.inf, .inf] + data_type: float32 + name: input + preprocessing: + - kwargs: + { + axes: xyc, + max_percentile: 99.8, + min_percentile: 1.0, + mode: per_sample, + } + name: scale_range + shape: [1, 512, 512, 1] +license: MIT +links: [deepimagej/deepimagej] +name: Unet2DKeras +outputs: + - axes: bxyc + data_range: [-.inf, .inf] + data_type: float64 + name: output + shape: [1, 512, 512, 1] +sample_inputs: [sample_input_0.tif] +sample_outputs: [sample_output_0.tif] +tags: [zerocostdl4mic, deepimagej, segmentation, tem, unet] +test_inputs: [test_input.npy] +test_outputs: [test_output.npy] +timestamp: "2021-11-30T11:37:51.374314Z" +type: model +weights: + keras_hdf5: + sha256: 2c0d85bbb37f424d4927bd6ccd6537bac815b1c0ed5f0b4bfa313be3efba96af + source: keras_weights.hdf5 + tensorflow_version: "1.15" diff --git a/example_specs/models/unet2d_keras_tf/sample_input_0.tif b/example_descriptions/models/unet2d_keras_tf/sample_input_0.tif similarity index 100% rename from example_specs/models/unet2d_keras_tf/sample_input_0.tif rename to example_descriptions/models/unet2d_keras_tf/sample_input_0.tif diff --git a/example_specs/models/unet2d_keras_tf/sample_output_0.tif b/example_descriptions/models/unet2d_keras_tf/sample_output_0.tif similarity index 100% rename from example_specs/models/unet2d_keras_tf/sample_output_0.tif rename to example_descriptions/models/unet2d_keras_tf/sample_output_0.tif diff --git a/example_specs/models/unet2d_keras_tf/test_input.npy b/example_descriptions/models/unet2d_keras_tf/test_input.npy similarity index 100% rename from example_specs/models/unet2d_keras_tf/test_input.npy rename to example_descriptions/models/unet2d_keras_tf/test_input.npy diff --git a/example_specs/models/unet2d_keras_tf/test_output.npy b/example_descriptions/models/unet2d_keras_tf/test_output.npy similarity index 100% rename from example_specs/models/unet2d_keras_tf/test_output.npy rename to example_descriptions/models/unet2d_keras_tf/test_output.npy diff --git a/example_specs/models/unet2d_keras_tf2/Contours2InstanceSegmentation.ijm b/example_descriptions/models/unet2d_keras_tf2/Contours2InstanceSegmentation.ijm similarity index 100% rename from example_specs/models/unet2d_keras_tf2/Contours2InstanceSegmentation.ijm rename to example_descriptions/models/unet2d_keras_tf2/Contours2InstanceSegmentation.ijm diff --git a/example_specs/models/unet2d_keras_tf2/README.md b/example_descriptions/models/unet2d_keras_tf2/README.md similarity index 100% rename from example_specs/models/unet2d_keras_tf2/README.md rename to example_descriptions/models/unet2d_keras_tf2/README.md diff --git a/example_specs/models/unet2d_keras_tf2/cover.png b/example_descriptions/models/unet2d_keras_tf2/cover.png similarity index 100% rename from example_specs/models/unet2d_keras_tf2/cover.png rename to 
example_descriptions/models/unet2d_keras_tf2/cover.png diff --git a/example_specs/models/unet2d_keras_tf2/cover_1.png b/example_descriptions/models/unet2d_keras_tf2/cover_1.png similarity index 100% rename from example_specs/models/unet2d_keras_tf2/cover_1.png rename to example_descriptions/models/unet2d_keras_tf2/cover_1.png diff --git a/example_specs/models/unet2d_keras_tf2/keras_weights.hdf5 b/example_descriptions/models/unet2d_keras_tf2/keras_weights.hdf5 similarity index 100% rename from example_specs/models/unet2d_keras_tf2/keras_weights.hdf5 rename to example_descriptions/models/unet2d_keras_tf2/keras_weights.hdf5 diff --git a/example_specs/models/unet2d_keras_tf2/per_sample_scale_range.ijm b/example_descriptions/models/unet2d_keras_tf2/per_sample_scale_range.ijm similarity index 100% rename from example_specs/models/unet2d_keras_tf2/per_sample_scale_range.ijm rename to example_descriptions/models/unet2d_keras_tf2/per_sample_scale_range.ijm diff --git a/example_descriptions/models/unet2d_keras_tf2/rdf_v0_4.yaml b/example_descriptions/models/unet2d_keras_tf2/rdf_v0_4.yaml new file mode 100644 index 000000000..9bc626b3f --- /dev/null +++ b/example_descriptions/models/unet2d_keras_tf2/rdf_v0_4.yaml @@ -0,0 +1,116 @@ +attachments: + files: + - per_sample_scale_range.ijm + - Contours2InstanceSegmentation.ijm + - training_evaluation.csv +authors: + - name: Estibaliz Gomez de Mariscal + affiliation: Instituto Gulbenkian de Ciencia +cite: + - doi: "10.1038/s41592-018-0261-2" + text: Falk et al. Nature Methods + 2019 + - doi: "10.1007/978-3-319-24574-4_28" + text: Ronneberger et al. arXiv in 2015 + - doi: "10.1101/2020.03.20.000133" + text: Lucas von Chamier et al. biorXiv 2020 + - doi: "10.1038/s42003-022-03634-z" + text: Christoph Spahn et al. + Communications Biology 2022 +config: + _id: 10.5281/zenodo.7261974/7261975 + bioimageio: + created: "2022-10-28 17:06:39.509102" + doi: 10.5281/zenodo.7261975 + nickname: placid-llama + nickname_icon: "\U0001F999" + owners: [147356] + status: accepted + version_id: "7261975" + version_name: version 1 + deepimagej: + allow_tiling: true + model_keys: null + prediction: + postprocess: + - { spec: null } + preprocess: + - { kwargs: per_sample_scale_range.ijm, spec: "ij.IJ::runMacroFile" } + pyramidal_model: false + test_information: + inputs: + - name: test_input.npy + pixel_size: { x: 0.065, y: 0.065, z: 1 } + size: 256 x 256 x 1 x 1 + memory_peak: null + outputs: + - { name: test_output.npy, size: 256 x 256 x 1 x 3, type: image } + runtime: null +covers: [cover_1.png, cover.png] +description: + This trained 2D U-Net model segments the contour, foreground and background + of Bacillus Subtilis bacteria imaged with Widefield microscopy images. The current + segmentation enables running further watershed segmentation to obtain a unique label + for each individual bacteria detected in the image. +documentation: README.md +format_version: 0.4.8 +inputs: + - axes: bxyc + data_range: [-.inf, .inf] + data_type: float32 + name: input + preprocessing: + - kwargs: + { + axes: xyc, + max_percentile: 99.8, + min_percentile: 1.0, + mode: per_sample, + } + name: scale_range + shape: [1, 256, 256, 1] +license: MIT +links: [deepimagej/deepimagej, imjoy/BioImageIO-Packager, ilastik/ilastik] +maintainers: + - { github_user: esgomezm } +name: "B. 
Sutilist bacteria segmentation - Widefield microscopy - 2D UNet" +outputs: + - axes: bxyc + data_range: [-.inf, .inf] + data_type: float32 + name: output + shape: + offset: [0.0, 0.0, 0.0, 0.0] + reference_tensor: input + scale: [1.0, 1.0, 1.0, 3.0] +rdf_source: https://bioimage-io.github.io/collection-bioimage-io/rdfs/10.5281/zenodo.7261974/7261975/rdf.yaml +sample_inputs: [sample_input_0.tif] +sample_outputs: [sample_output_0.tif] +tags: + [ + zerocostdl4mic, + segmentation, + 2d, + unet, + bacillus-subtilis, + fluorescence-light-microscopy, + cells, + semantic-segmentation, + bacteria, + microbiology, + ] +test_inputs: [test_input.npy] +test_outputs: [test_output.npy] +timestamp: "2022-10-28T16:20:17.690336Z" +type: model +version: 0.1.0 +weights: + keras_hdf5: + sha256: 8f64a31164843648f1fc2e4c57c08299b9bae4b18dbeecb9f823585ca971e90b + source: keras_weights.hdf5 + tensorflow_version: 2.9.2 + tensorflow_saved_model_bundle: + sha256: 40333b1333594edff9b0cc4243da5304addcacf2e77a5b12f062f102f6005c56 + source: tf-weights.zip + tensorflow_version: 2.6.2 + parent: keras_hdf5 diff --git a/example_specs/models/unet2d_keras_tf2/sample_input_0.tif b/example_descriptions/models/unet2d_keras_tf2/sample_input_0.tif similarity index 100% rename from example_specs/models/unet2d_keras_tf2/sample_input_0.tif rename to example_descriptions/models/unet2d_keras_tf2/sample_input_0.tif diff --git a/example_specs/models/unet2d_keras_tf2/sample_output_0.tif b/example_descriptions/models/unet2d_keras_tf2/sample_output_0.tif similarity index 100% rename from example_specs/models/unet2d_keras_tf2/sample_output_0.tif rename to example_descriptions/models/unet2d_keras_tf2/sample_output_0.tif diff --git a/example_specs/models/unet2d_keras_tf2/test_input.npy b/example_descriptions/models/unet2d_keras_tf2/test_input.npy similarity index 100% rename from example_specs/models/unet2d_keras_tf2/test_input.npy rename to example_descriptions/models/unet2d_keras_tf2/test_input.npy diff --git a/example_specs/models/unet2d_keras_tf2/test_output.npy b/example_descriptions/models/unet2d_keras_tf2/test_output.npy similarity index 100% rename from example_specs/models/unet2d_keras_tf2/test_output.npy rename to example_descriptions/models/unet2d_keras_tf2/test_output.npy diff --git a/example_specs/models/unet2d_keras_tf2/tf-weights.zip b/example_descriptions/models/unet2d_keras_tf2/tf-weights.zip similarity index 100% rename from example_specs/models/unet2d_keras_tf2/tf-weights.zip rename to example_descriptions/models/unet2d_keras_tf2/tf-weights.zip diff --git a/example_specs/models/unet2d_keras_tf2/training_evaluation.csv b/example_descriptions/models/unet2d_keras_tf2/training_evaluation.csv similarity index 100% rename from example_specs/models/unet2d_keras_tf2/training_evaluation.csv rename to example_descriptions/models/unet2d_keras_tf2/training_evaluation.csv diff --git a/example_specs/models/unet2d_multi_tensor/cover.jpg b/example_descriptions/models/unet2d_multi_tensor/cover.jpg similarity index 100% rename from example_specs/models/unet2d_multi_tensor/cover.jpg rename to example_descriptions/models/unet2d_multi_tensor/cover.jpg diff --git a/example_specs/models/unet2d_multi_tensor/documentation.md b/example_descriptions/models/unet2d_multi_tensor/documentation.md similarity index 100% rename from example_specs/models/unet2d_multi_tensor/documentation.md rename to example_descriptions/models/unet2d_multi_tensor/documentation.md diff --git a/example_specs/models/unet2d_multi_tensor/environment.yaml 
b/example_descriptions/models/unet2d_multi_tensor/environment.yaml similarity index 100% rename from example_specs/models/unet2d_multi_tensor/environment.yaml rename to example_descriptions/models/unet2d_multi_tensor/environment.yaml diff --git a/example_specs/models/unet2d_multi_tensor/multi_tensor_unet.py b/example_descriptions/models/unet2d_multi_tensor/multi_tensor_unet.py similarity index 75% rename from example_specs/models/unet2d_multi_tensor/multi_tensor_unet.py rename to example_descriptions/models/unet2d_multi_tensor/multi_tensor_unet.py index 647d0466a..6127717b7 100644 --- a/example_specs/models/unet2d_multi_tensor/multi_tensor_unet.py +++ b/example_descriptions/models/unet2d_multi_tensor/multi_tensor_unet.py @@ -9,7 +9,10 @@ class UNetBase(nn.Module): def __init__(self, encoder, base, decoder, out_conv=None, final_activation=None): super().__init__() if len(encoder) != len(decoder): - raise ValueError(f"Incompatible depth of encoder (depth={len(encoder)}) and decoder (depth={len(decoder)})") + raise ValueError( + f"Incompatible depth of encoder (depth={len(encoder)}) and decoder" + f" (depth={len(decoder)})" + ) self.encoder = encoder self.base = base @@ -86,7 +89,9 @@ def forward(self, *x): def _update_conv_kwargs(kwargs, scale_factor): # if the scale factor is a scalar or all entries are the same we don't need to update the kwargs - if isinstance(scale_factor, int) or scale_factor.count(scale_factor[0]) == len(scale_factor): + if isinstance(scale_factor, int) or scale_factor.count(scale_factor[0]) == len( + scale_factor + ): return kwargs else: # otherwise set anisotropic kernel kernel_size = kwargs.get("kernel_size", 3) @@ -97,7 +102,9 @@ def _update_conv_kwargs(kwargs, scale_factor): if not (isinstance(kernel_size, int) and isinstance(padding, int)): return kwargs - kernel_size = tuple(1 if factor == 1 else kernel_size for factor in scale_factor) + kernel_size = tuple( + 1 if factor == 1 else kernel_size for factor in scale_factor + ) padding = tuple(0 if factor == 1 else padding for factor in scale_factor) kwargs.update({"kernel_size": kernel_size, "padding": padding}) return kwargs @@ -105,16 +112,26 @@ def _update_conv_kwargs(kwargs, scale_factor): class Encoder(nn.Module): def __init__( - self, features, scale_factors, conv_block_impl, pooler_impl, anisotropic_kernel=False, **conv_block_kwargs + self, + features, + scale_factors, + conv_block_impl, + pooler_impl, + anisotropic_kernel=False, + **conv_block_kwargs, ): super().__init__() if len(features) != len(scale_factors) + 1: - raise ValueError("Incompatible number of features {len(features)} and scale_factors {len(scale_factors)}") + raise ValueError( + "Incompatible number of features {len(features)} and scale_factors" + " {len(scale_factors)}" + ) conv_kwargs = [conv_block_kwargs] * len(scale_factors) if anisotropic_kernel: conv_kwargs = [ - _update_conv_kwargs(kwargs, scale_factor) for kwargs, scale_factor in zip(conv_kwargs, scale_factors) + _update_conv_kwargs(kwargs, scale_factor) + for kwargs, scale_factor in zip(conv_kwargs, scale_factors) ] self.blocks = nn.ModuleList( @@ -147,16 +164,26 @@ def forward(self, x): class Decoder(nn.Module): def __init__( - self, features, scale_factors, conv_block_impl, sampler_impl, anisotropic_kernel=False, **conv_block_kwargs + self, + features, + scale_factors, + conv_block_impl, + sampler_impl, + anisotropic_kernel=False, + **conv_block_kwargs, ): super().__init__() if len(features) != len(scale_factors) + 1: - raise ValueError("Incompatible number of features 
{len(features)} and scale_factors {len(scale_factors)}") + raise ValueError( + "Incompatible number of features {len(features)} and scale_factors" + " {len(scale_factors)}" + ) conv_kwargs = [conv_block_kwargs] * len(scale_factors) if anisotropic_kernel: conv_kwargs = [ - _update_conv_kwargs(kwargs, scale_factor) for kwargs, scale_factor in zip(conv_kwargs, scale_factors) + _update_conv_kwargs(kwargs, scale_factor) + for kwargs, scale_factor in zip(conv_kwargs, scale_factors) ] self.blocks = nn.ModuleList( @@ -166,7 +193,10 @@ def __init__( ] ) self.samplers = nn.ModuleList( - [sampler_impl(factor, inc, outc) for factor, inc, outc in zip(scale_factors, features[:-1], features[1:])] + [ + sampler_impl(factor, inc, outc) + for factor, inc, outc in zip(scale_factors, features[:-1], features[1:]) + ] ) self.return_outputs = False @@ -192,10 +222,15 @@ def _concat(self, x1, x2): def forward(self, x, encoder_inputs): if len(encoder_inputs) != len(self.blocks): - raise ValueError(f"Invalid number of encoder_inputs: expect {len(self.blocks)}, got {len(encoder_inputs)}") + raise ValueError( + f"Invalid number of encoder_inputs: expect {len(self.blocks)}, got" + f" {len(encoder_inputs)}" + ) decoder_out = [] - for block, sampler, from_encoder in zip(self.blocks, self.samplers, encoder_inputs): + for block, sampler, from_encoder in zip( + self.blocks, self.samplers, encoder_inputs + ): x = sampler(x) x = block(self._concat(x, from_encoder)) decoder_out.append(x) @@ -216,11 +251,16 @@ def get_norm_layer(norm, dim, channels, n_groups=32): elif norm == "BatchNorm": return nn.BatchNorm2d(channels) if dim == 2 else nn.BatchNorm3d(channels) else: - raise ValueError(f"Invalid norm: expect one of 'InstanceNorm', 'BatchNorm' or 'GroupNorm', got {norm}") + raise ValueError( + "Invalid norm: expect one of 'InstanceNorm', 'BatchNorm' or 'GroupNorm'," + f" got {norm}" + ) class ConvBlock(nn.Module): - def __init__(self, in_channels, out_channels, dim, kernel_size=3, padding=1, norm=None): + def __init__( + self, in_channels, out_channels, dim, kernel_size=3, padding=1, norm=None + ): super().__init__() self.in_channels = in_channels self.out_channels = out_channels @@ -229,18 +269,26 @@ def __init__(self, in_channels, out_channels, dim, kernel_size=3, padding=1, nor if norm is None: self.block = nn.Sequential( - conv(in_channels, out_channels, kernel_size=kernel_size, padding=padding), + conv( + in_channels, out_channels, kernel_size=kernel_size, padding=padding + ), nn.ReLU(inplace=True), - conv(out_channels, out_channels, kernel_size=kernel_size, padding=padding), + conv( + out_channels, out_channels, kernel_size=kernel_size, padding=padding + ), nn.ReLU(inplace=True), ) else: self.block = nn.Sequential( get_norm_layer(norm, dim, in_channels), - conv(in_channels, out_channels, kernel_size=kernel_size, padding=padding), + conv( + in_channels, out_channels, kernel_size=kernel_size, padding=padding + ), nn.ReLU(inplace=True), get_norm_layer(norm, dim, out_channels), - conv(out_channels, out_channels, kernel_size=kernel_size, padding=padding), + conv( + out_channels, out_channels, kernel_size=kernel_size, padding=padding + ), nn.ReLU(inplace=True), ) @@ -258,7 +306,9 @@ def __init__(self, scale_factor, in_channels, out_channels, dim, mode): self.conv = conv(in_channels, out_channels, 1) def forward(self, x): - x = nn.functional.interpolate(x, scale_factor=self.scale_factor, mode=self.mode, align_corners=False) + x = nn.functional.interpolate( + x, scale_factor=self.scale_factor, mode=self.mode, align_corners=False 
+ ) x = self.conv(x) return x @@ -293,7 +343,9 @@ def __init__( sampler_impl=Upsampler2d, **conv_block_kwargs, ): - features_encoder = [in_channels] + [initial_features * gain**i for i in range(depth)] + features_encoder = [in_channels] + [ + initial_features * gain**i for i in range(depth) + ] features_decoder = [initial_features * gain**i for i in range(depth + 1)][::-1] scale_factors = depth * [2] @@ -303,10 +355,17 @@ def __init__( if len(out_channels) != depth: raise ValueError() out_conv = nn.ModuleList( - [nn.Conv2d(feat, outc, 1) for feat, outc in zip(features_decoder[1:], out_channels)] + [ + nn.Conv2d(feat, outc, 1) + for feat, outc in zip(features_decoder[1:], out_channels) + ] ) else: - out_conv = None if out_channels is None else nn.Conv2d(features_decoder[-1], out_channels, 1) + out_conv = ( + None + if out_channels is None + else nn.Conv2d(features_decoder[-1], out_channels, 1) + ) super().__init__( encoder=Encoder( @@ -323,7 +382,9 @@ def __init__( sampler_impl=sampler_impl, **conv_block_kwargs, ), - base=conv_block_impl(features_encoder[-1], features_encoder[-1] * gain, **conv_block_kwargs), + base=conv_block_impl( + features_encoder[-1], features_encoder[-1] * gain, **conv_block_kwargs + ), out_conv=out_conv, final_activation=final_activation, ) diff --git a/example_descriptions/models/unet2d_multi_tensor/rdf_v0_4.yaml b/example_descriptions/models/unet2d_multi_tensor/rdf_v0_4.yaml new file mode 100644 index 000000000..e463a7fe5 --- /dev/null +++ b/example_descriptions/models/unet2d_multi_tensor/rdf_v0_4.yaml @@ -0,0 +1,73 @@ +authors: + - { name: Constantin Pape, github_user: constantinpape } +cite: + - { text: training library, doi: "10.5281/zenodo.5108853" } + - { text: architecture, doi: "10.1007/978-3-319-24574-4_28" } +covers: [cover.jpg] +dependencies: conda:environment.yaml +description: Multi tensor +documentation: documentation.md +format_version: 0.4.0 +git_repo: https://github.com/constantinpape/torch-em.git +inputs: + - axes: bcyx + data_range: [-.inf, .inf] + data_type: float32 + name: input0 + preprocessing: + - kwargs: { axes: cyx, mode: per_sample } + name: zero_mean_unit_variance + shape: + min: [1, 1, 32, 32] + step: [0, 0, 16, 16] + - axes: bcyx + data_range: [-.inf, .inf] + data_type: float32 + name: input1 + preprocessing: + - kwargs: { axes: cyx, mode: per_sample } + name: zero_mean_unit_variance + shape: + min: [1, 1, 32, 32] + step: [0, 0, 16, 16] +license: CC-BY-4.0 +links: [ilastik/ilastik] +name: Multi-tensor +outputs: + - axes: bcyx + data_range: [-.inf, .inf] + data_type: float32 + name: output0 + shape: + offset: [0, 0, 0, 0] + reference_tensor: input0 + scale: [1, 1, 1, 1] + - axes: bcyx + data_range: [-.inf, .inf] + data_type: float32 + name: output1 + shape: + offset: [0, 0, 0, 0] + reference_tensor: input1 + scale: [1, 1, 1, 1] +tags: [segmentation] +test_inputs: [test_input_0.npy, test_input_1.npy] +test_outputs: [test_output_0.npy, test_output_1.npy] +timestamp: "2021-09-13T15:55:34.193995" +type: model +weights: + onnx: + opset_version: 12 + sha256: 9b5bd88a3d29cf9979b30c03b4d5af12fdfa1d7193f5d2f2cc3942ffcf71ce3c + source: weights.onnx + parent: pytorch_state_dict + torchscript: + sha256: 097bb5062df1fe48a5e7473ea2f6025c77d334a9e3f92af79fc3d6d530c01720 + source: weights-torchscript.pt + parent: pytorch_state_dict + pytorch_state_dict: + architecture: multi_tensor_unet.py:MultiTensorUNet + architecture_sha256: 5e3d36b5187b85d5c935f2efde7cafe293dbffa413618f49a0744bf1be75c22b + kwargs: { depth: 3, in_channels: 2, initial_features: 16, 
out_channels: 2 } + sha256: c498522b3f2b02429b41fe9dbcb722ce0d7ad4cae7fcf8059cee27857ae49b00 + source: weights.pt diff --git a/example_specs/models/unet2d_multi_tensor/test_input_0.npy b/example_descriptions/models/unet2d_multi_tensor/test_input_0.npy similarity index 100% rename from example_specs/models/unet2d_multi_tensor/test_input_0.npy rename to example_descriptions/models/unet2d_multi_tensor/test_input_0.npy diff --git a/example_specs/models/unet2d_multi_tensor/test_input_1.npy b/example_descriptions/models/unet2d_multi_tensor/test_input_1.npy similarity index 100% rename from example_specs/models/unet2d_multi_tensor/test_input_1.npy rename to example_descriptions/models/unet2d_multi_tensor/test_input_1.npy diff --git a/example_specs/models/unet2d_multi_tensor/test_output_0.npy b/example_descriptions/models/unet2d_multi_tensor/test_output_0.npy similarity index 100% rename from example_specs/models/unet2d_multi_tensor/test_output_0.npy rename to example_descriptions/models/unet2d_multi_tensor/test_output_0.npy diff --git a/example_specs/models/unet2d_multi_tensor/test_output_1.npy b/example_descriptions/models/unet2d_multi_tensor/test_output_1.npy similarity index 100% rename from example_specs/models/unet2d_multi_tensor/test_output_1.npy rename to example_descriptions/models/unet2d_multi_tensor/test_output_1.npy diff --git a/example_specs/models/unet2d_multi_tensor/weights-torchscript.pt b/example_descriptions/models/unet2d_multi_tensor/weights-torchscript.pt similarity index 100% rename from example_specs/models/unet2d_multi_tensor/weights-torchscript.pt rename to example_descriptions/models/unet2d_multi_tensor/weights-torchscript.pt diff --git a/example_specs/models/unet2d_multi_tensor/weights.onnx b/example_descriptions/models/unet2d_multi_tensor/weights.onnx similarity index 100% rename from example_specs/models/unet2d_multi_tensor/weights.onnx rename to example_descriptions/models/unet2d_multi_tensor/weights.onnx diff --git a/example_specs/models/unet2d_multi_tensor/weights.pt b/example_descriptions/models/unet2d_multi_tensor/weights.pt similarity index 100% rename from example_specs/models/unet2d_multi_tensor/weights.pt rename to example_descriptions/models/unet2d_multi_tensor/weights.pt diff --git a/example_specs/models/unet2d_nuclei_broad/README.md b/example_descriptions/models/unet2d_nuclei_broad/README.md similarity index 100% rename from example_specs/models/unet2d_nuclei_broad/README.md rename to example_descriptions/models/unet2d_nuclei_broad/README.md diff --git a/example_descriptions/models/unet2d_nuclei_broad/bioimageio.yaml b/example_descriptions/models/unet2d_nuclei_broad/bioimageio.yaml new file mode 100644 index 000000000..8748782df --- /dev/null +++ b/example_descriptions/models/unet2d_nuclei_broad/bioimageio.yaml @@ -0,0 +1,120 @@ +type: model +format_version: 0.5.0 + +name: UNet 2D Nuclei Broad +description: A 2d U-Net trained on the nuclei broad dataset. +authors: + - name: "Constantin Pape;@bioimage-io" + affiliation: "EMBL Heidelberg" + orcid: "0000-0001-6562-7187" + - name: "Fynn Beuttenmueller" + affiliation: "EMBL Heidelberg" + orcid: "0000-0002-8567-6389" +maintainers: + - name: "Constantin Pape" + github_user: constantinpape + - name: "Fynn Beuttenmueller" + github_user: fynnbe + +# we allow for multiple citations. Each citation contains TEXT and a DOI and/or URL. +cite: + - text: "Ronneberger, Olaf et al. U-net: Convolutional networks for biomedical image segmentation. MICCAI 2015." 
+ doi: 10.1007/978-3-319-24574-4_28 + - text: "2018 Data Science Bowl" + url: https://www.kaggle.com/c/data-science-bowl-2018 + +git_repo: https://github.com/bioimage-io/spec-bioimage-io/tree/main/example_specs/models/unet2d_nuclei_broad +tags: [unet2d, pytorch, nucleus, segmentation, dsb2018] +license: MIT + +documentation: README.md # may also be a url +covers: [cover0.png] +attachments: [] +timestamp: 2019-12-11T12:22:32Z # ISO 8601 + +inputs: + - id: raw + description: raw input + axes: + - type: batch + - type: channel + channel_names: [raw_intensity] + - type: space # todo: scale/unit + id: y + size: 512 + - type: space + id: x + size: 512 + test_tensor: + source: test_input.npy + sha256: 92768a2e0f2643c80b57afa5494019a5aafe5b0d92fe36ae740763adf3594a57 + sample_tensor: + source: test_input.png + sha256: 8771f558305dd89f4a85fe659e8ef5e116c94d64e668dc23b9282fda3fe9cce8 + preprocessing: # list of preprocessing steps + - id: zero_mean_unit_variance # name of preprocessing step + kwargs: + axes: [x, y] + +outputs: + - id: probability + description: probability in [0,1] + data: + type: float32 + range: + - 0.0 + - 1.0 + axes: + - type: batch + - type: channel + channel_names: [probability] + - type: space + id: y + size: + tensor_id: raw + axis_id: y + halo: 32 + - type: space + size: + tensor_id: raw + axis_id: x + id: x + halo: 32 + test_tensor: + source: test_output.npy + sha256: 39925fb38bc74530902e44bf8e43a8bd0a9e3675aab758d89f47dc66cb9818ed + sample_tensor: + source: test_output.png + sha256: 7bce8b53bcd0a12487a61f953aafe0f3700652848980d1083964c5bcb9555eec + +weights: + pytorch_state_dict: + authors: + - name: "Constantin Pape;@bioimage-io" + affiliation: "EMBL Heidelberg" + orcid: "0000-0001-6562-7187" + sha256: e4d3885bccbe41cbf6c1d825f3cd2b707c7021ead5593156007e407a16b27cf2 + source: "https://zenodo.org/records/3446812/files/unet2d_weights.torch" + architecture: + callable: UNet2d + source: unet2d.py + sha256: 7cdd8332dc3e3735e71c328f81b63a9ac86c028f80522312484ca9a4027d4ce1 + kwargs: { input_channels: 1, output_channels: 1 } + dependencies: + source: environment.yaml + sha256: 2c92c4d3c66981651881cdae2a910488d2f395babb59e63701047669ef586de9 + pytorch_version: "1.5.1" + onnx: + sha256: f1f086d5e340f9d4d7001a1b62a2b835f9b87a2fb5452c4fe7d8cc821bdf539c + source: weights.onnx + opset_version: 12 + parent: pytorch_state_dict + torchscript: + sha256: 62fa1c39923bee7d58a192277e0dd58f2da9ee810662addadd0f44a3784d9210 + source: weights.pt + parent: pytorch_state_dict + pytorch_version: "1.5.1" + +training_data: + id: ilastik/covid_if_training_data # note: not the real training data + version_number: 1 diff --git a/example_specs/models/unet2d_nuclei_broad/cover0.png b/example_descriptions/models/unet2d_nuclei_broad/cover0.png similarity index 100% rename from example_specs/models/unet2d_nuclei_broad/cover0.png rename to example_descriptions/models/unet2d_nuclei_broad/cover0.png diff --git a/example_specs/models/unet2d_nuclei_broad/environment.yaml b/example_descriptions/models/unet2d_nuclei_broad/environment.yaml similarity index 73% rename from example_specs/models/unet2d_nuclei_broad/environment.yaml rename to example_descriptions/models/unet2d_nuclei_broad/environment.yaml index 4297c6caa..d8fdc2bea 100644 --- a/example_specs/models/unet2d_nuclei_broad/environment.yaml +++ b/example_descriptions/models/unet2d_nuclei_broad/environment.yaml @@ -1,5 +1,4 @@ -name: - unet2d_nuclei_broad +name: unet2d_nuclei_broad channels: - conda-forge - defaults diff --git 
a/example_specs/models/unet2d_nuclei_broad/rdf_expand_output_shape.yaml b/example_descriptions/models/unet2d_nuclei_broad/expand_output_shape_v0_4.bioimageio.yaml similarity index 50% rename from example_specs/models/unet2d_nuclei_broad/rdf_expand_output_shape.yaml rename to example_descriptions/models/unet2d_nuclei_broad/expand_output_shape_v0_4.bioimageio.yaml index 90b2c8b6a..8878226f7 100644 --- a/example_specs/models/unet2d_nuclei_broad/rdf_expand_output_shape.yaml +++ b/example_descriptions/models/unet2d_nuclei_broad/expand_output_shape_v0_4.bioimageio.yaml @@ -1,5 +1,5 @@ # TODO physical scale of the data -format_version: 0.4.6 +format_version: 0.4.10 name: UNet 2D Nuclei Broad description: A 2d U-Net trained on the nuclei broad dataset. @@ -14,11 +14,10 @@ maintainers: - name: "Constantin Pape" github_user: constantinpape - # we allow for multiple citations. Each citation contains TEXT, DOI and URL. One of DOI or URL needs to be given. cite: - text: "Ronneberger, Olaf et al. U-net: Convolutional networks for biomedical image segmentation. MICCAI 2015." - doi: https://doi.org/10.1007/978-3-319-24574-4_28 + doi: 10.1007/978-3-319-24574-4_28 - text: "2018 Data Science Bowl" url: https://www.kaggle.com/c/data-science-bowl-2018 @@ -26,23 +25,23 @@ git_repo: https://github.com/bioimage-io/spec-bioimage-io/tree/main/example_spec tags: [unet2d, pytorch, nucleus, segmentation, dsb2018] license: MIT -documentation: README.md # may also be a url +documentation: README.md # may also be a url covers: [cover0.png] attachments: {} -timestamp: 2019-12-11T12:22:32Z # ISO 8601 +timestamp: 2019-12-11T12:22:32Z # ISO 8601 inputs: - name: raw description: raw input - axes: bcyx # letters of axes in btczyx + axes: bcyx # letters of axes in btczyx data_type: float32 data_range: [-.inf, .inf] shape: [1, 1, 512, 512] - preprocessing: # list of preprocessing steps - - name: zero_mean_unit_variance # name of preprocessing step + preprocessing: # list of preprocessing steps + - name: zero_mean_unit_variance # name of preprocessing step kwargs: - mode: per_sample # mode in [fixed, per_dataset, per_sample] - axes: yx # subset of axes to normalize jointly, batch ('b') is not a valid axis key here! + mode: per_sample # mode in [fixed, per_dataset, per_sample] + axes: yx # subset of axes to normalize jointly, batch ('b') is not a valid axis key here! 
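The zero_mean_unit_variance step declared above normalizes each sample independently, jointly over the y and x axes. A minimal numpy sketch of what this amounts to (the eps default of 1e-6 follows the v0.4 spec; the helper itself is illustrative, not the spec package's implementation):

import numpy as np

def zero_mean_unit_variance(x: np.ndarray, axes, eps=1e-6):
    # per-sample normalization: subtract the mean and divide by the std over `axes`
    mean = x.mean(axis=axes, keepdims=True)
    std = x.std(axis=axes, keepdims=True)
    return (x - mean) / (std + eps)

# a bcyx tensor as described in the inputs section; axes (2, 3) correspond to yx
sample = np.random.rand(1, 1, 512, 512).astype("float32")
normalized = zero_mean_unit_variance(sample, axes=(2, 3))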
outputs: - name: probability @@ -53,31 +52,28 @@ outputs: halo: [0, 0, 0, 32, 32] shape: reference_tensor: raw - scale: [1.0, 1.0, null, 1.0, 1.0] - offset: [0.0, 0.0, 0.5, 0.0, 0.0] + scale: [1.0, 0.0, null, 1.0, 1.0] + offset: [0.0, 0.5, 0.5, 0.0, 0.0] test_inputs: [test_input.npy] -test_outputs: [test_output.npy] - -sample_inputs: [test_input.npy] -sample_outputs: [test_output_expanded.npy] +test_outputs: [test_output_expanded.npy] weights: - pytorch_state_dict: - authors: - - name: "Constantin Pape;@bioimage-io" - affiliation: "EMBL Heidelberg" - orcid: "0000-0001-6562-7187" - sha256: e4d3885bccbe41cbf6c1d825f3cd2b707c7021ead5593156007e407a16b27cf2 - source: https://zenodo.org/record/3446812/files/unet2d_weights.torch - architecture: unet2d_expand_output_shape.py:UNet2d - architecture_sha256: cf42a6d86adeb4eb6e8e37b539a20e5413866b183bed88f4e2e26ad1639761ed - kwargs: {input_channels: 1, output_channels: 1} - dependencies: conda:environment.yaml + pytorch_state_dict: + authors: + - name: "Constantin Pape;@bioimage-io" + affiliation: "EMBL Heidelberg" + orcid: "0000-0001-6562-7187" + sha256: e4d3885bccbe41cbf6c1d825f3cd2b707c7021ead5593156007e407a16b27cf2 + source: https://zenodo.org/records/3446812/files/unet2d_weights.torch + architecture: unet2d_expand_output_shape.py:UNet2d + architecture_sha256: 80a886acc734f848a8e018d8063cfd7e003d7e20076583b28326bfdd6136be32 + kwargs: { input_channels: 1, output_channels: 1 } + dependencies: conda:environment.yaml type: model -version: 0.1.3 -download_url: https://example.com # note: not recommended for model RDFs +version: 1 +download_url: https://example.com/ # note: not recommended for model RDFs training_data: - id: ilastik/covid_if_training_data # note: not the real training data + id: ilastik/covid_if_training_data # note: not the real training data diff --git a/example_descriptions/models/unet2d_nuclei_broad/invalid_v0_4_0_duplicate_tensor_names.bioimageio.yaml b/example_descriptions/models/unet2d_nuclei_broad/invalid_v0_4_0_duplicate_tensor_names.bioimageio.yaml new file mode 100644 index 000000000..95f8ad126 --- /dev/null +++ b/example_descriptions/models/unet2d_nuclei_broad/invalid_v0_4_0_duplicate_tensor_names.bioimageio.yaml @@ -0,0 +1,79 @@ +# TODO physical scale of the data +format_version: 0.4.0 + +name: UNet 2D Nuclei Broad +description: A 2d U-Net trained on the nuclei broad dataset. +authors: + - name: "Constantin Pape;@bioimage-io" + affiliation: "EMBL Heidelberg" + orcid: "0000-0001-6562-7187" + - name: "Fynn Beuttenmueller" + affiliation: "EMBL Heidelberg" + orcid: "0000-0002-8567-6389" + +# we allow for multiple citations. Each citation contains TEXT, DOI and URL. One of DOI or URL needs to be given. +cite: + - text: "Ronneberger, Olaf et al. U-net: Convolutional networks for biomedical image segmentation. MICCAI 2015." 
+ doi: 10.1007/978-3-319-24574-4_28 + - text: "2018 Data Science Bowl" + url: https://www.kaggle.com/c/data-science-bowl-2018 + +git_repo: https://github.com/bioimage-io/spec-bioimage-io/tree/main/example_specs/models/unet2d_nuclei_broad +tags: [unet2d, pytorch, nucleus, segmentation, dsb2018] +license: MIT + +documentation: README.md +covers: [cover0.png] +attachments: {} +timestamp: 2019-12-11T12:22:32Z # ISO 8601 + +inputs: + - name: data + description: raw input + axes: bcyx # letters of axes in btczyx + data_type: float32 + data_range: [-.inf, .inf] + shape: [1, 1, 512, 512] + preprocessing: # list of preprocessing steps + - name: zero_mean_unit_variance # name of preprocessing step + kwargs: + mode: per_sample # mode in [fixed, per_dataset, per_sample] + axes: yx # subset of axes to normalize jointly, batch ('b') is not a valid axis key here! + +outputs: + - name: data # <<--------------------------------------- invalid as name 'data' is used in inputs already!!! + description: probability in [0,1] + axes: bcyx + data_type: float32 + data_range: [-.inf, .inf] + halo: [0, 0, 32, 32] + shape: [1, 1, 512, 512] + +dependencies: conda:environment.yaml + +test_inputs: [test_input.npy] +test_outputs: [test_output.npy] + +weights: + pytorch_state_dict: + authors: + - name: "Constantin Pape;@bioimage-io" + affiliation: "EMBL Heidelberg" + orcid: "0000-0001-6562-7187" + sha256: e4d3885bccbe41cbf6c1d825f3cd2b707c7021ead5593156007e407a16b27cf2 + source: https://zenodo.org/records/3446812/files/unet2d_weights.torch + architecture: unet2d.py:UNet2d + architecture_sha256: 7cdd8332dc3e3735e71c328f81b63a9ac86c028f80522312484ca9a4027d4ce1 + kwargs: { input_channels: 1, output_channels: 1 } + onnx: + sha256: f1f086d5e340f9d4d7001a1b62a2b835f9b87a2fb5452c4fe7d8cc821bdf539c + source: weights.onnx + opset_version: 12 + parent: pytorch_state_dict + torchscript: + sha256: 62fa1c39923bee7d58a192277e0dd58f2da9ee810662addadd0f44a3784d9210 + source: weights.pt + parent: pytorch_state_dict + +type: model +version: 0.1.3 diff --git a/example_specs/models/unet2d_nuclei_broad/rdf.yaml b/example_descriptions/models/unet2d_nuclei_broad/rdf.yaml.wip similarity index 51% rename from example_specs/models/unet2d_nuclei_broad/rdf.yaml rename to example_descriptions/models/unet2d_nuclei_broad/rdf.yaml.wip index 3a0e7413c..0f5df3ca4 100644 --- a/example_specs/models/unet2d_nuclei_broad/rdf.yaml +++ b/example_descriptions/models/unet2d_nuclei_broad/rdf.yaml.wip @@ -1,26 +1,26 @@ -# TODO physical scale of the data format_version: 0.4.9 name: UNet 2D Nuclei Broad description: A 2d U-Net trained on the nuclei broad dataset. authors: - - name: "Constantin Pape;@bioimage-io" - affiliation: "EMBL Heidelberg" - orcid: "0000-0001-6562-7187" - - name: "Fynn Beuttenmueller" - affiliation: "EMBL Heidelberg" - orcid: "0000-0002-8567-6389" +- name: "Constantin Pape;@bioimage-io" + affiliation: "EMBL Heidelberg" + orcid: "0000-0001-6562-7187" +- name: "Fynn Beuttenmueller" + affiliation: "EMBL Heidelberg" + orcid: "0000-0002-8567-6389" maintainers: - - name: "Constantin Pape" - github_user: constantinpape - +- name: "Constantin Pape" + github_user: constantinpape +- name: "Fynn Beuttenmueller" + github_user: fynnbe # we allow for multiple citations. Each citation contains TEXT, DOI and URL. One of DOI or URL needs to be given. cite: - - text: "Ronneberger, Olaf et al. U-net: Convolutional networks for biomedical image segmentation. MICCAI 2015." 
- doi: https://doi.org/10.1007/978-3-319-24574-4_28 - - text: "2018 Data Science Bowl" - url: https://www.kaggle.com/c/data-science-bowl-2018 +- text: "Ronneberger, Olaf et al. U-net: Convolutional networks for biomedical image segmentation. MICCAI 2015." + doi: https://doi.org/10.1007/978-3-319-24574-4_28 +- text: "2018 Data Science Bowl" + url: https://www.kaggle.com/c/data-science-bowl-2018 git_repo: https://github.com/bioimage-io/spec-bioimage-io/tree/main/example_specs/models/unet2d_nuclei_broad tags: [unet2d, pytorch, nucleus, segmentation, dsb2018] @@ -32,35 +32,37 @@ attachments: {} timestamp: 2019-12-11T12:22:32Z # ISO 8601 inputs: - - name: raw - description: raw input - axes: bcyx # letters of axes in btczyx - data_type: float32 - data_range: [-.inf, .inf] - shape: [1, 1, 512, 512] - preprocessing: # list of preprocessing steps - - name: zero_mean_unit_variance # name of preprocessing step - kwargs: - mode: per_sample # mode in [fixed, per_dataset, per_sample] - axes: yx # subset of axes to normalize jointly, batch ('b') is not a valid axis key here! +- name: raw + description: raw input + shape: [1, 1, 512, 512] + axes: + - role: batch + - role: channel + - role: y + - role: x + data_type: float32 + data_range: [-.inf, .inf] + test_tensor: test_input.npy + sample_tensor: test_input.npy + preprocessing: # list of preprocessing steps + - name: zero_mean_unit_variance # name of preprocessing step + kwargs: + mode: per_sample # mode in [fixed, per_dataset, per_sample] + axes: yx # subset of axes to normalize jointly, batch ('b') is not a valid axis key here! outputs: - - name: probability - description: probability in [0,1] - axes: bcyx - data_type: float32 - data_range: [-.inf, .inf] - halo: [0, 0, 32, 32] - shape: - reference_tensor: raw - scale: [1.0, 1.0, 1.0, 1.0] - offset: [0.0, 0.0, 0.0, 0.0] - -test_inputs: [test_input.npy] -test_outputs: [test_output.npy] - -sample_inputs: [test_input.npy] -sample_outputs: [test_output.npy] +- name: probability + description: probability in [0,1] + axes: bcyx + data_type: float32 + data_range: [-.inf, .inf] + halo: [0, 0, 32, 32] + test_tensor: test_output.npy + sample_tensor: test_output.npy + shape: + reference_tensor: raw + scale: [1.0, 1.0, 1.0, 1.0] + offset: [0.0, 0.0, 0.0, 0.0] weights: pytorch_state_dict: @@ -86,7 +88,7 @@ weights: type: model version: 0.1.3 -download_url: https://example.com # note: not recommended for model RDFs +download_url: https://example.com/ # note: not recommended for model RDFs training_data: id: ilastik/covid_if_training_data # note: not the real training data diff --git a/example_descriptions/models/unet2d_nuclei_broad/test_input.npy b/example_descriptions/models/unet2d_nuclei_broad/test_input.npy new file mode 100644 index 000000000..31305f65a Binary files /dev/null and b/example_descriptions/models/unet2d_nuclei_broad/test_input.npy differ diff --git a/example_descriptions/models/unet2d_nuclei_broad/test_input.png b/example_descriptions/models/unet2d_nuclei_broad/test_input.png new file mode 100644 index 000000000..8aa08ee7e Binary files /dev/null and b/example_descriptions/models/unet2d_nuclei_broad/test_input.png differ diff --git a/example_specs/models/unet2d_nuclei_broad/test_output.npy b/example_descriptions/models/unet2d_nuclei_broad/test_output.npy similarity index 100% rename from example_specs/models/unet2d_nuclei_broad/test_output.npy rename to example_descriptions/models/unet2d_nuclei_broad/test_output.npy diff --git a/example_descriptions/models/unet2d_nuclei_broad/test_output.png 
b/example_descriptions/models/unet2d_nuclei_broad/test_output.png new file mode 100644 index 000000000..5cebf2a43 Binary files /dev/null and b/example_descriptions/models/unet2d_nuclei_broad/test_output.png differ diff --git a/example_specs/models/unet2d_nuclei_broad/test_output_expanded.npy b/example_descriptions/models/unet2d_nuclei_broad/test_output_expanded.npy similarity index 100% rename from example_specs/models/unet2d_nuclei_broad/test_output_expanded.npy rename to example_descriptions/models/unet2d_nuclei_broad/test_output_expanded.npy diff --git a/example_specs/collections/unet2d_nuclei_broad_coll/unet2d_.py b/example_descriptions/models/unet2d_nuclei_broad/unet2d.py similarity index 72% rename from example_specs/collections/unet2d_nuclei_broad_coll/unet2d_.py rename to example_descriptions/models/unet2d_nuclei_broad/unet2d.py index fedc9be10..7fba44dd5 100644 --- a/example_specs/collections/unet2d_nuclei_broad_coll/unet2d_.py +++ b/example_descriptions/models/unet2d_nuclei_broad/unet2d.py @@ -1,3 +1,4 @@ +# type: ignore import torch import torch.nn as nn @@ -9,7 +10,9 @@ def __init__(self, scale_factor, mode="bilinear"): self.mode = mode def forward(self, input): - return nn.functional.interpolate(input, scale_factor=self.scale_factor, mode=self.mode, align_corners=False) + return nn.functional.interpolate( + input, scale_factor=self.scale_factor, mode=self.mode, align_corners=False + ) class UNet2d(nn.Module): @@ -20,14 +23,22 @@ def __init__(self, input_channels, output_channels, training=False): self.n_levels = 3 self.encoders = nn.ModuleList( - [self.conv_layer(self.input_channels, 16), self.conv_layer(16, 32), self.conv_layer(32, 64)] + [ + self.conv_layer(self.input_channels, 16), + self.conv_layer(16, 32), + self.conv_layer(32, 64), + ] ) self.downsamplers = nn.ModuleList([self.downsampler()] * self.n_levels) self.base = self.conv_layer(64, 128) - self.decoders = nn.ModuleList([self.conv_layer(128, 64), self.conv_layer(64, 32), self.conv_layer(32, 16)]) - self.upsamplers = nn.ModuleList([self.upsampler(128, 64), self.upsampler(64, 32), self.upsampler(32, 16)]) + self.decoders = nn.ModuleList( + [self.conv_layer(128, 64), self.conv_layer(64, 32), self.conv_layer(32, 16)] + ) + self.upsamplers = nn.ModuleList( + [self.upsampler(128, 64), self.upsampler(64, 32), self.upsampler(32, 16)] + ) self.output = nn.Conv2d(16, self.output_channels, 1) self.training = training @@ -58,7 +69,9 @@ def forward(self, input): x = self.base(x) - for decoder, sampler, enc in zip(self.decoders, self.upsamplers, from_encoder[::-1]): + for decoder, sampler, enc in zip( + self.decoders, self.upsamplers, from_encoder[::-1] + ): x = sampler(x) x = torch.cat([enc, x], dim=1) x = decoder(x) diff --git a/example_specs/models/unet2d_nuclei_broad/unet2d_expand_output_shape.py b/example_descriptions/models/unet2d_nuclei_broad/unet2d_expand_output_shape.py similarity index 73% rename from example_specs/models/unet2d_nuclei_broad/unet2d_expand_output_shape.py rename to example_descriptions/models/unet2d_nuclei_broad/unet2d_expand_output_shape.py index 9a497bd8b..911ca039f 100644 --- a/example_specs/models/unet2d_nuclei_broad/unet2d_expand_output_shape.py +++ b/example_descriptions/models/unet2d_nuclei_broad/unet2d_expand_output_shape.py @@ -1,3 +1,4 @@ +# type: ignore import torch import torch.nn as nn @@ -9,7 +10,9 @@ def __init__(self, scale_factor, mode="bilinear"): self.mode = mode def forward(self, input): - return nn.functional.interpolate(input, scale_factor=self.scale_factor, mode=self.mode, 
align_corners=False) + return nn.functional.interpolate( + input, scale_factor=self.scale_factor, mode=self.mode, align_corners=False + ) class UNet2d(nn.Module): @@ -20,14 +23,22 @@ def __init__(self, input_channels, output_channels, training=False): self.n_levels = 3 self.encoders = nn.ModuleList( - [self.conv_layer(self.input_channels, 16), self.conv_layer(16, 32), self.conv_layer(32, 64)] + [ + self.conv_layer(self.input_channels, 16), + self.conv_layer(16, 32), + self.conv_layer(32, 64), + ] ) self.downsamplers = nn.ModuleList([self.downsampler()] * self.n_levels) self.base = self.conv_layer(64, 128) - self.decoders = nn.ModuleList([self.conv_layer(128, 64), self.conv_layer(64, 32), self.conv_layer(32, 16)]) - self.upsamplers = nn.ModuleList([self.upsampler(128, 64), self.upsampler(64, 32), self.upsampler(32, 16)]) + self.decoders = nn.ModuleList( + [self.conv_layer(128, 64), self.conv_layer(64, 32), self.conv_layer(32, 16)] + ) + self.upsamplers = nn.ModuleList( + [self.upsampler(128, 64), self.upsampler(64, 32), self.upsampler(32, 16)] + ) self.output = nn.Conv2d(16, self.output_channels, 1) self.training = training @@ -58,7 +69,9 @@ def forward(self, input): x = self.base(x) - for decoder, sampler, enc in zip(self.decoders, self.upsamplers, from_encoder[::-1]): + for decoder, sampler, enc in zip( + self.decoders, self.upsamplers, from_encoder[::-1] + ): x = sampler(x) x = torch.cat([enc, x], dim=1) x = decoder(x) diff --git a/example_specs/models/unet2d_nuclei_broad/rdf_v0_4_0.yaml b/example_descriptions/models/unet2d_nuclei_broad/v0_4_0.bioimageio.yaml similarity index 50% rename from example_specs/models/unet2d_nuclei_broad/rdf_v0_4_0.yaml rename to example_descriptions/models/unet2d_nuclei_broad/v0_4_0.bioimageio.yaml index 204535772..a80032945 100644 --- a/example_specs/models/unet2d_nuclei_broad/rdf_v0_4_0.yaml +++ b/example_descriptions/models/unet2d_nuclei_broad/v0_4_0.bioimageio.yaml @@ -14,11 +14,10 @@ maintainers: - name: "Constantin Pape" github_user: constantinpape - # we allow for multiple citations. Each citation contains TEXT, DOI and URL. One of DOI or URL needs to be given. cite: - text: "Ronneberger, Olaf et al. U-net: Convolutional networks for biomedical image segmentation. MICCAI 2015." - doi: https://doi.org/10.1007/978-3-319-24574-4_28 + doi: 10.1007/978-3-319-24574-4_28 - text: "2018 Data Science Bowl" url: https://www.kaggle.com/c/data-science-bowl-2018 @@ -29,20 +28,20 @@ license: MIT documentation: README.md covers: [cover0.png] attachments: {} -timestamp: 2019-12-11T12:22:32Z # ISO 8601 +timestamp: 2019-12-11T12:22:32Z # ISO 8601 inputs: - name: raw description: raw input - axes: bcyx # letters of axes in btczyx + axes: bcyx # letters of axes in btczyx data_type: float32 data_range: [-.inf, .inf] shape: [1, 1, 512, 512] - preprocessing: # list of preprocessing steps - - name: zero_mean_unit_variance # name of preprocessing step + preprocessing: # list of preprocessing steps + - name: zero_mean_unit_variance # name of preprocessing step kwargs: - mode: per_sample # mode in [fixed, per_dataset, per_sample] - axes: yx # subset of axes to normalize jointly, batch ('b') is not a valid axis key here! + mode: per_sample # mode in [fixed, per_dataset, per_sample] + axes: yx # subset of axes to normalize jointly, batch ('b') is not a valid axis key here! 
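The outputs hunk that follows updates scale and offset under the v0.4 implicit output shape rule: per axis, size_out = size_ref * scale + 2 * offset. Setting scale to 0.0 with offset 0.5 therefore pins an axis to size 1, which is how these examples declare a singleton channel axis. A short sketch of the arithmetic (the function is illustrative):

def implicit_output_shape(ref_shape, scale, offset):
    # v0.4 rule: size_out = size_ref * scale + 2 * offset, per axis
    return [int(s * sc + 2 * off) for s, sc, off in zip(ref_shape, scale, offset)]

# reference tensor 'raw' is bcyx [1, 1, 512, 512]
print(implicit_output_shape([1, 1, 512, 512], [1.0, 0.0, 1.0, 1.0], [0.0, 0.5, 0.0, 0.0]))
# -> [1, 1, 512, 512]; the channel axis is fixed at 1 * 0.0 + 2 * 0.5 = 1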
outputs: - name: probability @@ -53,37 +52,37 @@ outputs: halo: [0, 0, 32, 32] shape: reference_tensor: raw - scale: [1.0, 1.0, 1.0, 1.0] - offset: [0.0, 0.0, 0.0, 0.0] + scale: [1.0, 0.0, 1.0, 1.0] + offset: [0.0, 0.5, 0.0, 0.0] dependencies: conda:environment.yaml test_inputs: [test_input.npy] test_outputs: [test_output.npy] -sample_inputs: [test_input.npy] -sample_outputs: [test_output.npy] +sample_inputs: [test_input.png] +sample_outputs: [test_output.png] weights: - pytorch_state_dict: - authors: - - name: "Constantin Pape;@bioimage-io" - affiliation: "EMBL Heidelberg" - orcid: "0000-0001-6562-7187" - sha256: e4d3885bccbe41cbf6c1d825f3cd2b707c7021ead5593156007e407a16b27cf2 - source: https://zenodo.org/record/3446812/files/unet2d_weights.torch - architecture: unet2d.py:UNet2d - architecture_sha256: cf42a6d86adeb4eb6e8e37b539a20e5413866b183bed88f4e2e26ad1639761ed - kwargs: {input_channels: 1, output_channels: 1} - onnx: - sha256: f1f086d5e340f9d4d7001a1b62a2b835f9b87a2fb5452c4fe7d8cc821bdf539c - source: weights.onnx - opset_version: 12 - parent: https://zenodo.org/record/3446812/files/unet2d_weights.torch - torchscript: - sha256: 62fa1c39923bee7d58a192277e0dd58f2da9ee810662addadd0f44a3784d9210 - source: weights.pt - parent: https://zenodo.org/record/3446812/files/unet2d_weights.torch + pytorch_state_dict: + authors: + - name: "Constantin Pape;@bioimage-io" + affiliation: "EMBL Heidelberg" + orcid: "0000-0001-6562-7187" + source: https://zenodo.org/records/3446812/files/unet2d_weights.torch + sha256: e4d3885bccbe41cbf6c1d825f3cd2b707c7021ead5593156007e407a16b27cf2 + architecture: unet2d.py:UNet2d + architecture_sha256: 7cdd8332dc3e3735e71c328f81b63a9ac86c028f80522312484ca9a4027d4ce1 + kwargs: { input_channels: 1, output_channels: 1 } + onnx: + source: weights.onnx + sha256: f1f086d5e340f9d4d7001a1b62a2b835f9b87a2fb5452c4fe7d8cc821bdf539c + opset_version: 12 + parent: pytorch_state_dict + torchscript: + source: weights.pt + sha256: 62fa1c39923bee7d58a192277e0dd58f2da9ee810662addadd0f44a3784d9210 + parent: pytorch_state_dict type: model version: 0.1.3 diff --git a/example_descriptions/models/unet2d_nuclei_broad/v0_4_9.bioimageio.yaml b/example_descriptions/models/unet2d_nuclei_broad/v0_4_9.bioimageio.yaml new file mode 100644 index 000000000..1b0375cbc --- /dev/null +++ b/example_descriptions/models/unet2d_nuclei_broad/v0_4_9.bioimageio.yaml @@ -0,0 +1,90 @@ +format_version: 0.4.9 + +name: UNet 2D Nuclei Broad +description: A 2d U-Net trained on the nuclei broad dataset. +authors: + - name: "Constantin Pape;@bioimage-io" + affiliation: "EMBL Heidelberg" + orcid: "0000-0001-6562-7187" + - name: "Fynn Beuttenmueller" + affiliation: "EMBL Heidelberg" + orcid: "0000-0002-8567-6389" +maintainers: + - name: "Constantin Pape" + github_user: constantinpape + +# we allow for multiple citations. Each citation contains TEXT, DOI and URL. One of DOI or URL needs to be given. +cite: + - text: "Ronneberger, Olaf et al. U-net: Convolutional networks for biomedical image segmentation. MICCAI 2015." 
+ doi: 10.1007/978-3-319-24574-4_28 + - text: "2018 Data Science Bowl" + url: https://www.kaggle.com/c/data-science-bowl-2018 + +git_repo: https://github.com/bioimage-io/spec-bioimage-io/tree/main/example_specs/models/unet2d_nuclei_broad +tags: [unet2d, pytorch, nucleus, segmentation, dsb2018] +license: MIT + +documentation: README.md # may also be a url +covers: [cover0.png] +attachments: {} +timestamp: 2019-12-11T12:22:32Z # ISO 8601 + +inputs: + - name: raw + description: raw input + axes: bcyx # letters of axes in btczyx + data_type: float32 + data_range: [-.inf, .inf] + shape: [1, 1, 512, 512] + preprocessing: # list of preprocessing steps + - name: zero_mean_unit_variance # name of preprocessing step + kwargs: + mode: per_sample # mode in [fixed, per_dataset, per_sample] + axes: yx # subset of axes to normalize jointly, batch ('b') is not a valid axis key here! + +outputs: + - name: probability + description: probability in [0,1] + axes: bcyx + data_type: float32 + data_range: [-.inf, .inf] + halo: [0, 0, 32, 32] + shape: + reference_tensor: raw + scale: [1.0, 0.0, 1.0, 1.0] + offset: [0.0, 0.5, 0.0, 0.0] + +test_inputs: [test_input.npy] +test_outputs: [test_output.npy] + +sample_inputs: [test_input.png] +sample_outputs: [test_output.png] + +weights: + pytorch_state_dict: + authors: + - name: "Constantin Pape;@bioimage-io" + affiliation: "EMBL Heidelberg" + orcid: "0000-0001-6562-7187" + source: https://zenodo.org/records/3446812/files/unet2d_weights.torch + sha256: e4d3885bccbe41cbf6c1d825f3cd2b707c7021ead5593156007e407a16b27cf2 + architecture: unet2d.py:UNet2d + architecture_sha256: 7cdd8332dc3e3735e71c328f81b63a9ac86c028f80522312484ca9a4027d4ce1 + kwargs: { input_channels: 1, output_channels: 1 } + dependencies: conda:environment.yaml + onnx: + source: weights.onnx + sha256: f1f086d5e340f9d4d7001a1b62a2b835f9b87a2fb5452c4fe7d8cc821bdf539c + opset_version: 12 + parent: pytorch_state_dict + torchscript: + source: weights.pt + sha256: 62fa1c39923bee7d58a192277e0dd58f2da9ee810662addadd0f44a3784d9210 + parent: pytorch_state_dict + +type: model +version: 0.1.3 +download_url: https://example.com/ # note: not recommended for model RDFs + +training_data: + id: ilastik/covid_if_training_data # note: not the real training data diff --git a/example_specs/models/unet2d_nuclei_broad/weights.onnx b/example_descriptions/models/unet2d_nuclei_broad/weights.onnx similarity index 100% rename from example_specs/models/unet2d_nuclei_broad/weights.onnx rename to example_descriptions/models/unet2d_nuclei_broad/weights.onnx diff --git a/example_specs/models/unet2d_nuclei_broad/weights.pt b/example_descriptions/models/unet2d_nuclei_broad/weights.pt similarity index 100% rename from example_specs/models/unet2d_nuclei_broad/weights.pt rename to example_descriptions/models/unet2d_nuclei_broad/weights.pt diff --git a/example_descriptions/models/upsample_test_model/docs.md b/example_descriptions/models/upsample_test_model/docs.md new file mode 100644 index 000000000..e69de29bb diff --git a/example_specs/models/upsample_test_model/test_input.npy b/example_descriptions/models/upsample_test_model/test_input.npy similarity index 100% rename from example_specs/models/upsample_test_model/test_input.npy rename to example_descriptions/models/upsample_test_model/test_input.npy diff --git a/example_specs/models/upsample_test_model/test_output.npy b/example_descriptions/models/upsample_test_model/test_output.npy similarity index 100% rename from example_specs/models/upsample_test_model/test_output.npy rename to 
example_descriptions/models/upsample_test_model/test_output.npy diff --git a/example_specs/models/upsample_test_model/upsample_model.py b/example_descriptions/models/upsample_test_model/upsample_model.py similarity index 100% rename from example_specs/models/upsample_test_model/upsample_model.py rename to example_descriptions/models/upsample_test_model/upsample_model.py diff --git a/example_descriptions/models/upsample_test_model/v0_4.bioimageio.yaml b/example_descriptions/models/upsample_test_model/v0_4.bioimageio.yaml new file mode 100644 index 000000000..5bf53c30f --- /dev/null +++ b/example_descriptions/models/upsample_test_model/v0_4.bioimageio.yaml @@ -0,0 +1,40 @@ +authors: + - name: William Patton + - name: Fynn Beuttenmüller + github_user: fynnbe +cite: + - { url: "https://example.com", text: Test } +covers: [] +description: Test model for scaling +documentation: docs.md +format_version: 0.4.10 +inputs: + - axes: bcyx + data_range: [-.inf, .inf] + data_type: float32 + name: input + shape: + min: [1, 1, 5, 5] + step: [0, 0, 5, 5] +license: CC-BY-4.0 +name: Test_scaling +outputs: + - axes: bcyx + data_range: [-.inf, .inf] + data_type: float32 + halo: [0, 0, 0, 0] + name: output + shape: + offset: [0.0, 0.5, 0.0, 0.0] + reference_tensor: input + scale: [1.0, 0.0, 4.0, 4.0] +tags: [scaling, test] +test_inputs: [test_input.npy] +test_outputs: [test_output.npy] +timestamp: "2022-10-23T00:00:00Z" +type: model +weights: + pytorch_state_dict: + architecture: upsample_model.py:Upscaler + architecture_sha256: 944f960311e1eedc42072fe264cb6b4e544f53cda3da9e6525621e5e3d4ba69b + source: weights.pt diff --git a/example_specs/models/upsample_test_model/weights.pt b/example_descriptions/models/upsample_test_model/weights.pt similarity index 100% rename from example_specs/models/upsample_test_model/weights.pt rename to example_descriptions/models/upsample_test_model/weights.pt diff --git a/example_specs/collections/partner_collection/rdf.yaml b/example_specs/collections/partner_collection/rdf.yaml deleted file mode 100644 index fbd5a246b..000000000 --- a/example_specs/collections/partner_collection/rdf.yaml +++ /dev/null @@ -1,10 +0,0 @@ -format_version: 0.2.2 -type: collection -name: Partner Collection -tags: [bioimage.io, partner-software] -description: "Resources for BioImgage.IO curated by the partner team." -id: partner -collection: - - rdf_source: datasets/dummy-dataset/rdf.yaml - - diff --git a/example_specs/collections/unet2d_nuclei_broad_coll/rdf.yaml b/example_specs/collections/unet2d_nuclei_broad_coll/rdf.yaml deleted file mode 100644 index 15562db34..000000000 --- a/example_specs/collections/unet2d_nuclei_broad_coll/rdf.yaml +++ /dev/null @@ -1,119 +0,0 @@ -format_version: 0.2.2 -type: collection - -name: UNet 2D Nuclei Broad - -description: A 2d U-Net trained on the nuclei broad dataset. -authors: - - name: "Constantin Pape;@bioimage-io" - affiliation: "EMBL Heidelberg" - orcid: "0000-0001-6562-7187" - - name: "Fynn Beuttenmueller" - affiliation: "EMBL Heidelberg" - orcid: "0000-0002-8567-6389" -maintainers: - - name: "Constantin Pape" - github_user: constantinpape - - -cite: - - text: "Ronneberger, Olaf et al. U-net: Convolutional networks for biomedical image segmentation. MICCAI 2015."
- doi: https://doi.org/10.1007/978-3-319-24574-4_28 - - text: "2018 Data Science Bowl" - url: https://www.kaggle.com/c/data-science-bowl-2018 - -git_repo: https://github.com/bioimage-io/spec-bioimage-io/tree/main/example_specs/models/unet2d_nuclei_broad -# tags: [unet2d, pytorch, nucleus, segmentation, dsb2018] # tags is optional for collection -license: MIT - -documentation: README.md -covers: [cover0.png] -attachments: {} -timestamp: 2019-12-11T12:22:32Z - -inputs: - - name: raw - description: raw input - axes: bcyx - data_type: float32 - data_range: [-.inf, .inf] - shape: [1, 1, 512, 512] - preprocessing: - - name: zero_mean_unit_variance - kwargs: - mode: per_sample - axes: yx - -outputs: - - name: probability - description: probability in [0,1] - axes: bcyx - data_type: float32 - data_range: [-.inf, .inf] - halo: [0, 0, 32, 32] - shape: - reference_tensor: raw - scale: [1.0, 1.0, 1.0, 1.0] - offset: [0.0, 0.0, 0.0, 0.0] - -test_inputs: [test_input.npy] -test_outputs: [test_output.npy] - -sample_inputs: [test_input.npy] -sample_outputs: [test_output.npy] - -version: 0.1.0 - -collection: -# - id: with_rdf_source_url -# rdf_source: https://raw.githubusercontent.com/bioimage-io/spec-bioimage-io/main/example_specs/models/unet2d_nuclei_broad/rdf.yaml -# name: UNet 2D Nuclei Broad (latest) - - id: in_place_0.4.0 - format_version: 0.4.0 - type: model - dependencies: conda:environment.yaml - weights: - pytorch_state_dict: - authors: - - name: "Constantin Pape;@bioimage-io" - affiliation: "EMBL Heidelberg" - orcid: "0000-0001-6562-7187" - sha256: e4d3885bccbe41cbf6c1d825f3cd2b707c7021ead5593156007e407a16b27cf2 - source: https://zenodo.org/record/3446812/files/unet2d_weights.torch - architecture: unet2d_.py:UNet2d - architecture_sha256: cf42a6d86adeb4eb6e8e37b539a20e5413866b183bed88f4e2e26ad1639761ed - kwargs: {input_channels: 1, output_channels: 1} - onnx: - sha256: 5bf14c4e65e8601ab551db99409ba7981ff0e501719bc2b0ee625ca9a9375b32 - source: weights.onnx - opset_version: 12 - parent: https://zenodo.org/record/3446812/files/unet2d_weights.torch - torchscript: - sha256: 62fa1c39923bee7d58a192277e0dd58f2da9ee810662addadd0f44a3784d9210 - source: weights.pt - parent: https://zenodo.org/record/3446812/files/unet2d_weights.torch - -# - id: in_place_0.4.1 -# format_version: 0.4.1 -# type: model -# weights: -# pytorch_state_dict: -# authors: -# - name: "Constantin Pape;@bioimage-io" -# affiliation: "EMBL Heidelberg" -# orcid: "0000-0001-6562-7187" -# sha256: e4d3885bccbe41cbf6c1d825f3cd2b707c7021ead5593156007e407a16b27cf2 -# source: https://zenodo.org/record/3446812/files/unet2d_weights.torch -# architecture: unet2d_.py:UNet2d -# architecture_sha256: cf42a6d86adeb4eb6e8e37b539a20e5413866b183bed88f4e2e26ad1639761ed -# kwargs: {input_channels: 1, output_channels: 1} -# dependencies: conda:environment.yaml -# onnx: -# sha256: 5bf14c4e65e8601ab551db99409ba7981ff0e501719bc2b0ee625ca9a9375b32 -# source: weights.onnx -# opset_version: 12 -# parent: https://zenodo.org/record/3446812/files/unet2d_weights.torch -# torchscript: -# sha256: 62fa1c39923bee7d58a192277e0dd58f2da9ee810662addadd0f44a3784d9210 -# source: weights.pt -# parent: https://zenodo.org/record/3446812/files/unet2d_weights.torch diff --git a/example_specs/datasets/covid_if_training_data/rdf.yaml b/example_specs/datasets/covid_if_training_data/rdf.yaml deleted file mode 100644 index 02c1efc59..000000000 --- a/example_specs/datasets/covid_if_training_data/rdf.yaml +++ /dev/null @@ -1,15 +0,0 @@ -authors: -- {name: Constantin Pape} -cite: -- {doi: 
'https://doi.org/10.1002/bies.202000257', text: 'Pape, Remme et al.'} -covers: ['https://raw.githubusercontent.com/ilastik/bioimage-io-models/main/dataset_src/covid-if-cover0.jpg', - 'https://raw.githubusercontent.com/ilastik/bioimage-io-models/main/dataset_src/covid-if-cover1.jpg', - 'https://raw.githubusercontent.com/ilastik/bioimage-io-models/main/dataset_src/covid-if-cover2.jpg'] -description: Training data for cell and nucleus segmentation as well as infection classification in IF data of Covid-19 infected cells. -documentation: https://raw.githubusercontent.com/ilastik/bioimage-io-models/main/dataset_src/covid-if.md -format_version: 0.2.3 -license: CC-BY-4.0 -name: Covid-IF Training Data -source: https://zenodo.org/record/5092850 -tags: [high-content-imaging, fluorescence-light-microscopy, 2D, cells, nuclei, covid19, semantic-segmentation, instance-segmentation] -type: dataset diff --git a/example_specs/models/stardist_example_model/rdf.yaml b/example_specs/models/stardist_example_model/rdf.yaml deleted file mode 100644 index 29ef77f48..000000000 --- a/example_specs/models/stardist_example_model/rdf.yaml +++ /dev/null @@ -1,48 +0,0 @@ -authors: -- {name: Uwe Schmidt} -- {name: Martin Weigert} -cite: -- {text: Cell Detection with Star-Convex Polygons, url: 'https://doi.org/10.1007/978-3-030-00934-2_30'} -- {text: Star-convex Polyhedra for 3D Object Detection and Segmentation in Microscopy, - url: 'https://doi.org/10.1109/WACV45572.2020.9093435'} -config: - stardist: - stardist_version: 0.7.3 - thresholds: {nms: 0.3, prob: 0.479071463157368} -covers: [stardist_logo.jpg] -dependencies: pip:requirements.txt -description: StarDist -documentation: README.md -format_version: 0.4.0 -git_repo: https://github.com/stardist/stardist -inputs: -- axes: byxc - data_range: [-.inf, .inf] - data_type: uint16 - name: input - preprocessing: - - kwargs: {axes: yx, max_percentile: 99.8, min_percentile: 1.0, mode: per_sample} - name: scale_range - shape: - min: [1, 80, 80, 1] - step: [0, 16, 16, 0] -license: CC-BY-NC-4.0 -name: StardistExampleModel -outputs: -- axes: byxc - data_range: [-.inf, .inf] - data_type: float32 - halo: [0, 32, 32, 0] - name: output - shape: - offset: [0.0, 0.0, 0.0, 16.5] - reference_tensor: input - scale: [1.0, 1.0, 1.0, 0.0] -tags: [stardist, segmentation, instance segmentation, object detection, tensorflow] -test_inputs: [test_input.npy] -test_outputs: [test_output0.npy] -timestamp: '2021-11-22T13:14:30.643565' -type: model -weights: - tensorflow_saved_model_bundle: {sha256: 444cbbbaa2267c999ae321cf66d77025284079684b6c203aa8b846b3c7c70376, - source: TF_SavedModel.zip, tensorflow_version: '1.15'} diff --git a/example_specs/models/stardist_example_model/rdf_wrong_shape.yaml b/example_specs/models/stardist_example_model/rdf_wrong_shape.yaml deleted file mode 100644 index 443572863..000000000 --- a/example_specs/models/stardist_example_model/rdf_wrong_shape.yaml +++ /dev/null @@ -1,47 +0,0 @@ -authors: -- {name: Uwe Schmidt} -- {name: Martin Weigert} -cite: -- {text: Cell Detection with Star-Convex Polygons, url: 'https://doi.org/10.1007/978-3-030-00934-2_30'} -- {text: Star-convex Polyhedra for 3D Object Detection and Segmentation in Microscopy, - url: 'https://doi.org/10.1109/WACV45572.2020.9093435'} -config: - stardist: - stardist_version: 0.7.3 - thresholds: {nms: 0.3, prob: 0.479071463157368} -covers: [stardist_logo.jpg] -dependencies: pip:requirements.txt -description: This spec contains an incorrect output shape description and is included for test purposes. 
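The stardist description above declares halo: [0, 32, 32, 0] on its byxc output, meaning 32 boundary pixels on each side of y and x are unreliable and consumers should crop them (or tile with matching overlap). A minimal numpy sketch of halo cropping (an illustrative helper, not part of bioimageio.spec):

import numpy as np

def crop_halo(tensor: np.ndarray, halo):
    # drop `halo[i]` entries from both ends of axis i; 0 keeps the axis whole
    slices = tuple(slice(h, s - h if h else None) for h, s in zip(halo, tensor.shape))
    return tensor[slices]

out = np.zeros((1, 80, 80, 33), dtype="float32")  # a valid byxc output shape
print(crop_halo(out, (0, 32, 32, 0)).shape)  # -> (1, 16, 16, 33)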
-documentation: README.md -format_version: 0.4.0 -git_repo: https://github.com/stardist/stardist -inputs: -- axes: byxc - data_range: [-.inf, .inf] - data_type: uint16 - name: input - preprocessing: - - kwargs: {axes: yx, max_percentile: 99.8, min_percentile: 1.0, mode: per_sample} - name: scale_range - shape: - min: [1, 80, 80, 1] - step: [0, 16, 16, 0] -license: CC-BY-NC-4.0 -name: StardistExampleModel -outputs: -- axes: byxc - data_range: [-.inf, .inf] - data_type: float32 - name: output - shape: - offset: [1, 1, 1, 33] # should be [0.0, 0.0, 0.0, 16.5] - reference_tensor: input - scale: [1.0, 1.0, 1.0, 0.0] -tags: [stardist, segmentation, instance segmentation, object detection, tensorflow] -test_inputs: [test_input.npy] -test_outputs: [test_output0.npy] -timestamp: '2021-11-22T13:14:30.643565' -type: model -weights: - tensorflow_saved_model_bundle: {sha256: 444cbbbaa2267c999ae321cf66d77025284079684b6c203aa8b846b3c7c70376, - source: TF_SavedModel.zip, tensorflow_version: '1.15'} diff --git a/example_specs/models/stardist_example_model/rdf_wrong_shape2.yaml b/example_specs/models/stardist_example_model/rdf_wrong_shape2.yaml deleted file mode 100644 index 820a2ba66..000000000 --- a/example_specs/models/stardist_example_model/rdf_wrong_shape2.yaml +++ /dev/null @@ -1,48 +0,0 @@ -authors: -- {name: Uwe Schmidt} -- {name: Martin Weigert} -cite: -- {text: Cell Detection with Star-Convex Polygons, url: 'https://doi.org/10.1007/978-3-030-00934-2_30'} -- {text: Star-convex Polyhedra for 3D Object Detection and Segmentation in Microscopy, - url: 'https://doi.org/10.1109/WACV45572.2020.9093435'} -config: - stardist: - stardist_version: 0.7.3 - thresholds: {nms: 0.3, prob: 0.479071463157368} -covers: [stardist_logo.jpg] -dependencies: pip:requirements.txt -description: StarDist -documentation: README.md -format_version: 0.4.0 -git_repo: https://github.com/stardist/stardist -inputs: -- axes: byxc - data_range: [-.inf, .inf] - data_type: uint16 - name: input - preprocessing: - - kwargs: {axes: yx, max_percentile: 99.8, min_percentile: 1.0, mode: per_sample} - name: scale_range - shape: - min: [1, 80, 80, 1] - step: [0, 17, 17, 0] -license: CC-BY-NC-4.0 -name: StardistExampleModel -outputs: -- axes: byxc - data_range: [-.inf, .inf] - data_type: float32 - halo: [0, 32, 32, 0] - name: output - shape: - offset: [0.0, 0.0, 0.0, 16.5] - reference_tensor: input - scale: [1.0, 1.0, 1.0, 0.0] -tags: [stardist, segmentation, instance segmentation, object detection, tensorflow] -test_inputs: [test_input.npy] -test_outputs: [test_output0.npy] -timestamp: '2021-11-22T13:14:30.643565' -type: model -weights: - tensorflow_saved_model_bundle: {sha256: 444cbbbaa2267c999ae321cf66d77025284079684b6c203aa8b846b3c7c70376, - source: TF_SavedModel.zip, tensorflow_version: '1.15'} diff --git a/example_specs/models/unet2d_diff_output_shape/rdf.yaml b/example_specs/models/unet2d_diff_output_shape/rdf.yaml deleted file mode 100644 index 553b63521..000000000 --- a/example_specs/models/unet2d_diff_output_shape/rdf.yaml +++ /dev/null @@ -1,46 +0,0 @@ -authors: -- {name: Constantin Pape; @constantinpape} -cite: -- {text: training library, url: 'https://doi.org/10.5281/zenodo.5108853'} -- {text: architecture, url: 'https://doi.org/10.1007/978-3-319-24574-4_28'} -covers: [cover.jpg] -dependencies: conda:./environment.yaml -description: diff-output-shape -documentation: ./documentation.md -format_version: 0.4.0 -git_repo: https://github.com/constantinpape/torch-em.git -inputs: -- axes: bcyx - data_range: [-.inf, .inf] - data_type: 
float32 - name: input - preprocessing: - - kwargs: {axes: cyx, mode: per_sample} - name: zero_mean_unit_variance - shape: - min: [1, 1, 32, 32] - step: [0, 0, 16, 16] -license: CC-BY-4.0 -links: [ilastik/ilastik] -name: diff-output-shape -outputs: -- axes: bcyx - data_range: [-.inf, .inf] - data_type: float32 - name: output - shape: - offset: [0, 0, 0, 0] - reference_tensor: input - scale: [1, 1, 0.5, 0.5] -tags: [segmentation] -test_inputs: [test_input.npy] -test_outputs: [test_output.npy] -timestamp: '2021-09-12T14:18:58.389834' -type: model -weights: - pytorch_state_dict: - architecture: ./resize_unet.py:ResizeUNet - architecture_sha256: bc9e7fe5dd5d3a6c7a4ef7d32b6704169f887b7632f898fc07c762eea7a3feb5 - kwargs: {depth: 3, in_channels: 1, initial_features: 16, out_channels: 1} - sha256: 2c475874f358eb75dc5f3b6af8d186e4fbf8da3acf43fb6662f0d5de21b0a838 - source: ./weights.pt diff --git a/example_specs/models/unet2d_fixed_shape/rdf.yaml b/example_specs/models/unet2d_fixed_shape/rdf.yaml deleted file mode 100644 index 77bb22094..000000000 --- a/example_specs/models/unet2d_fixed_shape/rdf.yaml +++ /dev/null @@ -1,44 +0,0 @@ -authors: -- {name: Constantin Pape; @constantinpape} -cite: -- {text: training library, url: 'https://doi.org/10.5281/zenodo.5108853'} -- {text: architecture, url: 'https://doi.org/10.1007/978-3-319-24574-4_28'} -covers: [cover.jpg] -dependencies: conda:./environment.yaml -description: fixed-shape -documentation: ./documentation.md -format_version: 0.3.2 -framework: pytorch -git_repo: https://github.com/constantinpape/torch-em.git -inputs: -- axes: bcyx - data_range: [-.inf, .inf] - data_type: float32 - name: input - preprocessing: - - kwargs: {axes: cyx, mode: per_sample} - name: zero_mean_unit_variance - shape: &id001 [1, 1, 256, 256] -kwargs: {depth: 3, final_activation: null, gain: 2, in_channels: 1, initial_features: 16, - out_channels: 1, postprocessing: null, return_side_outputs: false} -language: python -license: CC-BY-4.0 -links: [ilastik/ilastik] -name: fixed-shape -outputs: -- axes: bcyx - data_range: [-.inf, .inf] - data_type: float32 - halo: [0, 0, 8, 8] - name: output - shape: *id001 -sha256: 7f5b15948e8e2c91f78dcff34fbf30af517073e91ba487f3edb982b948d099b3 -source: unet.py:UNet2d -tags: [segmentation] -test_inputs: [test_input.npy] -test_outputs: [test_output.npy] -timestamp: '2021-09-12T14:04:52.211533' -type: model -weights: - pytorch_state_dict: {sha256: bcf26f08c26f5ef8f891c08dba2c8812bec7991643fb7c0f7b5d3e7dc321092f, - source: ./weights.pt} diff --git a/example_specs/models/unet2d_keras_tf/rdf.yaml b/example_specs/models/unet2d_keras_tf/rdf.yaml deleted file mode 100644 index 0c21dbefc..000000000 --- a/example_specs/models/unet2d_keras_tf/rdf.yaml +++ /dev/null @@ -1,59 +0,0 @@ -attachments: - files: [per_sample_scale_range.ijm] -authors: -- {affiliation: zerocost, name: zerocost} -cite: -- {text: Falk et al. Nature Methods 2019, url: 'https://doi.org/10.1038/s41592-018-0261-2'} -- {text: Ronneberger et al. arXiv in 2015, url: 'https://doi.org/10.1007/978-3-319-24574-4_28'} -- {text: Lucas von Chamier et al. 
biorXiv 2020, url: 'https://doi.org/10.1101/2020.03.20.000133'} -config: - deepimagej: - allow_tiling: true - model_keys: null - prediction: - postprocess: - - {spec: null} - preprocess: - - {kwargs: per_sample_scale_range.ijm, spec: 'ij.IJ::runMacroFile'} - pyramidal_model: false - test_information: - inputs: - - name: sample_input_0.tif - pixel_size: {x: 1.0, y: 1.0, z: 1.0} - size: 512 x 512 x 1 x 1 - memory_peak: null - outputs: - - {name: sample_output_0.tif, size: 512 x 512 x 1 x 1, type: image} - runtime: null -covers: [input.png, output.png] -description: 2D UNet trained on data from the ISBI 2D segmentation challenge. -documentation: README.md -format_version: 0.4.0 -inputs: -- axes: bxyc - data_range: [0.0, 255.0] - data_type: uint8 - name: input - preprocessing: - - kwargs: {axes: xyc, max_percentile: 99.8, min_percentile: 1, mode: per_sample} - name: scale_range - shape: [1, 512, 512, 1] -license: MIT -links: [deepimagej/deepimagej] -name: Unet2DKeras -outputs: -- axes: bxyc - data_range: [-.inf, .inf] - data_type: float64 - name: output - shape: [1, 512, 512, 1] -sample_inputs: [sample_input_0.tif] -sample_outputs: [sample_output_0.tif] -tags: [zerocostdl4mic, deepimagej, segmentation, tem, unet] -test_inputs: [test_input.npy] -test_outputs: [test_output.npy] -timestamp: '2021-11-30T11:37:51.374314' -type: model -weights: - keras_hdf5: {sha256: 2c0d85bbb37f424d4927bd6ccd6537bac815b1c0ed5f0b4bfa313be3efba96af, - source: keras_weights.hdf5, tensorflow_version: '1.15'} diff --git a/example_specs/models/unet2d_keras_tf2/rdf.yaml b/example_specs/models/unet2d_keras_tf2/rdf.yaml deleted file mode 100644 index 180a0ce86..000000000 --- a/example_specs/models/unet2d_keras_tf2/rdf.yaml +++ /dev/null @@ -1,87 +0,0 @@ -attachments: - files: [per_sample_scale_range.ijm, Contours2InstanceSegmentation.ijm, training_evaluation.csv] -authors: -- {affiliation: Instituto Gulbenkian de Ciencia, name: Estibaliz Gomez de Mariscal} -cite: -- {doi: 'https://doi.org/10.1038/s41592-018-0261-2', text: Falk et al. Nature Methods - 2019} -- {doi: 'https://doi.org/10.1007/978-3-319-24574-4_28', text: Ronneberger et al. arXiv - in 2015} -- {doi: 'https://doi.org/10.1101/2020.03.20.000133', text: Lucas von Chamier et al. - biorXiv 2020} -- {doi: 'https://doi.org/10.1038/s42003-022-03634-z', text: Christoph Spahn et al. - Communications Biology 2022} -config: - _id: 10.5281/zenodo.7261974/7261975 - bioimageio: - created: '2022-10-28 17:06:39.509102' - doi: 10.5281/zenodo.7261975 - nickname: placid-llama - nickname_icon: "\U0001F999" - owners: [147356] - status: accepted - version_id: '7261975' - version_name: version 1 - deepimagej: - allow_tiling: true - model_keys: null - prediction: - postprocess: - - {spec: null} - preprocess: - - {kwargs: per_sample_scale_range.ijm, spec: 'ij.IJ::runMacroFile'} - pyramidal_model: false - test_information: - inputs: - - name: test_input.npy - pixel_size: {x: 0.065, y: 0.065, z: 1} - size: 256 x 256 x 1 x 1 - memory_peak: null - outputs: - - {name: test_output.npy, size: 256 x 256 x 1 x 3, type: image} - runtime: null -covers: [cover_1.png, cover.png] -description: This trained 2D U-Net model segments the contour, foreground and background - of Bacillus Subtilis bacteria imaged with Widefield microscopy images. The current - segmentation enables running further watershed segmentation to obtain a unique label - for each individual bacteria detected in the image. 
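Several of the specs deleted below (e.g. the multi-tensor model) use the parameterized shape form, where any shape min + n * step with a single non-negative integer n shared across all axes is a valid input. A short sketch of picking a concrete tile size under that rule (the helper name is illustrative):

def concrete_shape(min_shape, step, n: int):
    # any min + n * step (elementwise, same n for every axis) is a valid input shape
    return [m + n * s for m, s in zip(min_shape, step)]

# min: [1, 1, 32, 32], step: [0, 0, 16, 16] as in the multi-tensor spec
print(concrete_shape([1, 1, 32, 32], [0, 0, 16, 16], n=2))  # -> [1, 1, 64, 64]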
-documentation: README.md -format_version: 0.4.8 -inputs: -- axes: bxyc - data_range: [0.0, 255.0] - data_type: uint8 - name: input - preprocessing: - - kwargs: {axes: xyc, max_percentile: 99.8, min_percentile: 1, mode: per_sample} - name: scale_range - shape: [1, 256, 256, 1] -license: MIT -links: [deepimagej/deepimagej, imjoy/BioImageIO-Packager, ilastik/ilastik] -maintainers: -- {github_user: esgomezm} -name: ' B. Sutilist bacteria segmentation - Widefield microscopy - 2D UNet' -outputs: -- axes: bxyc - data_range: [-.inf, .inf] - data_type: float32 - name: output - shape: - offset: [0.0, 0.0, 0.0, 0.0] - reference_tensor: input - scale: [1.0, 1.0, 1.0, 3.0] -rdf_source: https://bioimage-io.github.io/collection-bioimage-io/rdfs/10.5281/zenodo.7261974/7261975/rdf.yaml -sample_inputs: [sample_input_0.tif] -sample_outputs: [sample_output_0.tif] -tags: [zerocostdl4mic, segmentation, 2d, unet, bacillus-subtilis, fluorescence-light-microscopy, - cells, semantic-segmentation, bacteria, microbiology] -test_inputs: [test_input.npy] -test_outputs: [test_output.npy] -timestamp: '2022-10-28T16:20:17.690336' -type: model -version: 0.1.0 -weights: - keras_hdf5: {sha256: 8f64a31164843648f1fc2e4c57c08299b9bae4b18dbeecb9f823585ca971e90b, - source: keras_weights.hdf5, tensorflow_version: 2.9.2} - tensorflow_saved_model_bundle: {sha256: 40333b1333594edff9b0cc4243da5304addcacf2e77a5b12f062f102f6005c56, - source: tf-weights.zip, tensorflow_version: 2.6.2} diff --git a/example_specs/models/unet2d_multi_tensor/rdf.yaml b/example_specs/models/unet2d_multi_tensor/rdf.yaml deleted file mode 100644 index 98302141c..000000000 --- a/example_specs/models/unet2d_multi_tensor/rdf.yaml +++ /dev/null @@ -1,71 +0,0 @@ -authors: -- {name: Constantin Pape; @constantinpape} -cite: -- {text: training library, url: 'https://doi.org/10.5281/zenodo.5108853'} -- {text: architecture, url: 'https://doi.org/10.1007/978-3-319-24574-4_28'} -covers: [cover.jpg] -dependencies: conda:./environment.yaml -description: multi-tensor -documentation: documentation.md -format_version: 0.4.0 -git_repo: https://github.com/constantinpape/torch-em.git -inputs: -- axes: bcyx - data_range: [-.inf, .inf] - data_type: float32 - name: input0 - preprocessing: - - kwargs: {axes: cyx, mode: per_sample} - name: zero_mean_unit_variance - shape: - min: [1, 1, 32, 32] - step: [0, 0, 16, 16] -- axes: bcyx - data_range: [-.inf, .inf] - data_type: float32 - name: input1 - preprocessing: - - kwargs: {axes: cyx, mode: per_sample} - name: zero_mean_unit_variance - shape: - min: [1, 1, 32, 32] - step: [0, 0, 16, 16] -license: CC-BY-4.0 -links: [ilastik/ilastik] -name: multi-tensor -outputs: -- axes: bcyx - data_range: [-.inf, .inf] - data_type: float32 - name: output0 - shape: - offset: [0, 0, 0, 0] - reference_tensor: input0 - scale: [1, 1, 1, 1] -- axes: bcyx - data_range: [-.inf, .inf] - data_type: float32 - name: output1 - shape: - offset: [0, 0, 0, 0] - reference_tensor: input1 - scale: [1, 1, 1, 1] -tags: [segmentation] -test_inputs: [test_input_0.npy, test_input_1.npy] -test_outputs: [test_output_0.npy, test_output_1.npy] -timestamp: '2021-09-13T15:55:34.193995' -type: model -weights: - onnx: - opset_version: 12 - sha256: 9b5bd88a3d29cf9979b30c03b4d5af12fdfa1d7193f5d2f2cc3942ffcf71ce3c - source: ./weights.onnx - torchscript: - sha256: 097bb5062df1fe48a5e7473ea2f6025c77d334a9e3f92af79fc3d6d530c01720 - source: ./weights-torchscript.pt - pytorch_state_dict: - architecture: ./multi_tensor_unet.py:MultiTensorUNet - architecture_sha256: 
5e3d36b5187b85d5c935f2efde7cafe293dbffa413618f49a0744bf1be75c22b - kwargs: {depth: 3, in_channels: 2, initial_features: 16, out_channels: 2} - sha256: c498522b3f2b02429b41fe9dbcb722ce0d7ad4cae7fcf8059cee27857ae49b00 - source: ./weights.pt diff --git a/example_specs/models/unet2d_nuclei_broad/invalid_rdf_v0_4_0_duplicate_tensor_names.yaml b/example_specs/models/unet2d_nuclei_broad/invalid_rdf_v0_4_0_duplicate_tensor_names.yaml deleted file mode 100644 index e55499b2a..000000000 --- a/example_specs/models/unet2d_nuclei_broad/invalid_rdf_v0_4_0_duplicate_tensor_names.yaml +++ /dev/null @@ -1,85 +0,0 @@ -# TODO physical scale of the data -format_version: 0.4.0 - -name: UNet 2D Nuclei Broad -description: A 2d U-Net trained on the nuclei broad dataset. -authors: - - name: "Constantin Pape;@bioimage-io" - affiliation: "EMBL Heidelberg" - orcid: "0000-0001-6562-7187" - - name: "Fynn Beuttenmueller" - affiliation: "EMBL Heidelberg" - orcid: "0000-0002-8567-6389" - -# we allow for multiple citations. Each citation contains TEXT, DOI and URL. One of DOI or URL needs to be given. -cite: - - text: "Ronneberger, Olaf et al. U-net: Convolutional networks for biomedical image segmentation. MICCAI 2015." - doi: https://doi.org/10.1007/978-3-319-24574-4_28 - - text: "2018 Data Science Bowl" - url: https://www.kaggle.com/c/data-science-bowl-2018 - -git_repo: https://github.com/bioimage-io/spec-bioimage-io/tree/main/example_specs/models/unet2d_nuclei_broad -tags: [unet2d, pytorch, nucleus, segmentation, dsb2018] -license: MIT - -documentation: README.md -covers: [cover0.png] -attachments: {} -timestamp: 2019-12-11T12:22:32Z # ISO 8601 - -inputs: - - name: data - description: raw input - axes: bcyx # letters of axes in btczyx - data_type: float32 - data_range: [-.inf, .inf] - shape: [1, 1, 512, 512] - preprocessing: # list of preprocessing steps - - name: zero_mean_unit_variance # name of preprocessing step - kwargs: - mode: per_sample # mode in [fixed, per_dataset, per_sample] - axes: yx # subset of axes to normalize jointly, batch ('b') is not a valid axis key here! - -outputs: - - name: data # <<--------------------------------------- invalid as name 'data' is used in inputs already!!! 
- description: probability in [0,1] - axes: bcyx - data_type: float32 - data_range: [-.inf, .inf] - halo: [0, 0, 32, 32] - shape: - reference_tensor: raw - scale: [1.0, 1.0, 1.0, 1.0] - offset: [0.0, 0.0, 0.0, 0.0] - -dependencies: conda:environment.yaml - -test_inputs: [test_input.npy] -test_outputs: [test_output.npy] - -sample_inputs: [test_input.npy] -sample_outputs: [test_output.npy] - -weights: - pytorch_state_dict: - authors: - - name: "Constantin Pape;@bioimage-io" - affiliation: "EMBL Heidelberg" - orcid: "0000-0001-6562-7187" - sha256: e4d3885bccbe41cbf6c1d825f3cd2b707c7021ead5593156007e407a16b27cf2 - source: https://zenodo.org/record/3446812/files/unet2d_weights.torch - architecture: unet2d.py:UNet2d - architecture_sha256: cf42a6d86adeb4eb6e8e37b539a20e5413866b183bed88f4e2e26ad1639761ed - kwargs: {input_channels: 1, output_channels: 1} - onnx: - sha256: 5bf14c4e65e8601ab551db99409ba7981ff0e501719bc2b0ee625ca9a9375b32 - source: weights.onnx - opset_version: 12 - parent: https://zenodo.org/record/3446812/files/unet2d_weights.torch - pytorch_script: - sha256: 62fa1c39923bee7d58a192277e0dd58f2da9ee810662addadd0f44a3784d9210 - source: weights.pt - parent: https://zenodo.org/record/3446812/files/unet2d_weights.torch - -type: model -version: 0.1.3 diff --git a/example_specs/models/unet2d_nuclei_broad/rdf_v0_3_0.yaml b/example_specs/models/unet2d_nuclei_broad/rdf_v0_3_0.yaml deleted file mode 100644 index 5e5d5dbee..000000000 --- a/example_specs/models/unet2d_nuclei_broad/rdf_v0_3_0.yaml +++ /dev/null @@ -1,93 +0,0 @@ -format_version: 0.3.0 - -name: UNet 2D Nuclei Broad -description: A 2d U-Net trained on the nuclei broad dataset. -authors: - - "Constantin Pape;@bioimage-io" - - "Fynn Beuttenmueller" - -# we allow for multiple citations. Each citation contains TEXT, DOI and URL. One of DOI or URL needs to be given. -cite: - - text: "Ronneberger, Olaf et al. U-net: Convolutional networks for biomedical image segmentation. MICCAI 2015." - doi: https://doi.org/10.1007/978-3-319-24574-4_28 - - text: "2018 Data Science Bowl" - url: https://www.kaggle.com/c/data-science-bowl-2018 - -git_repo: https://github.com/bioimage-io/spec-bioimage-io/tree/main/example_specs/models/unet2d_nuclei_broad -tags: [unet2d, pytorch, nucleus, segmentation, dsb2018] -license: MIT - -documentation: README.md -covers: [cover0.png] -attachments: {} -timestamp: 2019-12-11T12:22:32Z # ISO 8601 - -inputs: - - name: raw - description: raw input - axes: bcyx # letters of axes in btczyx - data_type: float32 - data_range: [-.inf, .inf] - shape: [1, 1, 512, 512] - preprocessing: # list of preprocessing steps - - name: zero_mean_unit_variance # name of preprocessing step - kwargs: - mode: per_sample # mode in [fixed, per_dataset, per_sample] - axes: yx # subset of axes to normalize jointly, batch ('b') is not a valid axis key here! 
- -outputs: - - name: probability - description: probability in [0,1] - axes: bcyx - data_type: float32 - data_range: [-.inf, .inf] - halo: [0, 0, 32, 32] - shape: - reference_input: raw - scale: [1.0, 1.0, 1.0, 1.0] - offset: [0.0, 0.0, 0.0, 0.0] - -language: python -framework: pytorch -source: unet2d.py:UNet2d -sha256: cf42a6d86adeb4eb6e8e37b539a20e5413866b183bed88f4e2e26ad1639761ed -kwargs: {input_channels: 1, output_channels: 1} -dependencies: conda:environment.yaml - -test_inputs: [test_input.npy] -test_outputs: [test_output.npy] - -sample_inputs: [test_input.npy] -sample_outputs: [test_output.npy] - -weights: - pytorch_state_dict: - authors: - - "Constantin Pape;@bioimage-io" - sha256: e4d3885bccbe41cbf6c1d825f3cd2b707c7021ead5593156007e407a16b27cf2 - source: https://zenodo.org/record/3446812/files/unet2d_weights.torch - onnx: - sha256: f1f086d5e340f9d4d7001a1b62a2b835f9b87a2fb5452c4fe7d8cc821bdf539c - source: ./weights.onnx - opset_version: 12 - parent: https://zenodo.org/record/3446812/files/unet2d_weights.torch - pytorch_script: - sha256: 62fa1c39923bee7d58a192277e0dd58f2da9ee810662addadd0f44a3784d9210 - source: ./weights.pt - parent: https://zenodo.org/record/3446812/files/unet2d_weights.torch - -type: model -config: - future: - 0.3.2: - authors: - - affiliation: "EMBL Heidelberg" - orcid: "0000-0001-6562-7187" - - affiliation: "EMBL Heidelberg" - orcid: "0000-0002-8567-6389" - weights: - pytorch_state_dict: - authors: - - affiliation: "EMBL Heidelberg" - orcid: "0000-0001-6562-7187" - version: 0.1.3 diff --git a/example_specs/models/unet2d_nuclei_broad/rdf_v0_3_1.yaml b/example_specs/models/unet2d_nuclei_broad/rdf_v0_3_1.yaml deleted file mode 100644 index a7f6f3e16..000000000 --- a/example_specs/models/unet2d_nuclei_broad/rdf_v0_3_1.yaml +++ /dev/null @@ -1,93 +0,0 @@ -format_version: 0.3.1 - -name: UNet 2D Nuclei Broad -description: A 2d U-Net trained on the nuclei broad dataset. -authors: - - "Constantin Pape;@bioimage-io" - - "Fynn Beuttenmueller" - -# we allow for multiple citations. Each citation contains TEXT, DOI and URL. One of DOI or URL needs to be given. -cite: - - text: "Ronneberger, Olaf et al. U-net: Convolutional networks for biomedical image segmentation. MICCAI 2015." - doi: https://doi.org/10.1007/978-3-319-24574-4_28 - - text: "2018 Data Science Bowl" - url: https://www.kaggle.com/c/data-science-bowl-2018 - -git_repo: https://github.com/bioimage-io/spec-bioimage-io/tree/main/example_specs/models/unet2d_nuclei_broad -tags: [unet2d, pytorch, nucleus, segmentation, dsb2018] -license: MIT - -documentation: README.md -covers: [cover0.png] -attachments: {} -timestamp: 2019-12-11T12:22:32Z # ISO 8601 - -inputs: - - name: raw - description: raw input - axes: bcyx # letters of axes in btczyx - data_type: float32 - data_range: [-.inf, .inf] - shape: [1, 1, 512, 512] - preprocessing: # list of preprocessing steps - - name: zero_mean_unit_variance # name of preprocessing step - kwargs: - mode: per_sample # mode in [fixed, per_dataset, per_sample] - axes: yx # subset of axes to normalize jointly, batch ('b') is not a valid axis key here! 
- -outputs: - - name: probability - description: probability in [0,1] - axes: bcyx - data_type: float32 - data_range: [-.inf, .inf] - halo: [0, 0, 32, 32] - shape: - reference_input: raw - scale: [1.0, 1.0, 1.0, 1.0] - offset: [0.0, 0.0, 0.0, 0.0] - -language: python -framework: pytorch -source: unet2d.py:UNet2d -sha256: cf42a6d86adeb4eb6e8e37b539a20e5413866b183bed88f4e2e26ad1639761ed -kwargs: {input_channels: 1, output_channels: 1} -dependencies: conda:environment.yaml - -test_inputs: [test_input.npy] -test_outputs: [test_output.npy] - -sample_inputs: [test_input.npy] -sample_outputs: [test_output.npy] - -weights: - pytorch_state_dict: - authors: - - "Constantin Pape;@bioimage-io" - sha256: e4d3885bccbe41cbf6c1d825f3cd2b707c7021ead5593156007e407a16b27cf2 - source: https://zenodo.org/record/3446812/files/unet2d_weights.torch - onnx: - sha256: f1f086d5e340f9d4d7001a1b62a2b835f9b87a2fb5452c4fe7d8cc821bdf539c - source: ./weights.onnx - opset_version: 12 - parent: https://zenodo.org/record/3446812/files/unet2d_weights.torch - pytorch_script: - sha256: 62fa1c39923bee7d58a192277e0dd58f2da9ee810662addadd0f44a3784d9210 - source: ./weights.pt - parent: https://zenodo.org/record/3446812/files/unet2d_weights.torch - -type: model -config: - future: - 0.3.2: - authors: - - affiliation: "EMBL Heidelberg" - orcid: "0000-0001-6562-7187" - - affiliation: "EMBL Heidelberg" - orcid: "0000-0002-8567-6389" - weights: - pytorch_state_dict: - authors: - - affiliation: "EMBL Heidelberg" - orcid: "0000-0001-6562-7187" - version: 0.1.3 diff --git a/example_specs/models/unet2d_nuclei_broad/rdf_v0_3_2.yaml b/example_specs/models/unet2d_nuclei_broad/rdf_v0_3_2.yaml deleted file mode 100644 index d5b24db86..000000000 --- a/example_specs/models/unet2d_nuclei_broad/rdf_v0_3_2.yaml +++ /dev/null @@ -1,87 +0,0 @@ -# TODO physical scale of the data -format_version: 0.3.2 - -name: UNet 2D Nuclei Broad -description: A 2d U-Net trained on the nuclei broad dataset. -authors: - - name: "Constantin Pape;@bioimage-io" - affiliation: "EMBL Heidelberg" - orcid: "0000-0001-6562-7187" - - name: "Fynn Beuttenmueller" - affiliation: "EMBL Heidelberg" - orcid: "0000-0002-8567-6389" - -# we allow for multiple citations. Each citation contains TEXT, DOI and URL. One of DOI or URL needs to be given. -cite: - - text: "Ronneberger, Olaf et al. U-net: Convolutional networks for biomedical image segmentation. MICCAI 2015." - doi: https://doi.org/10.1007/978-3-319-24574-4_28 - - text: "2018 Data Science Bowl" - url: https://www.kaggle.com/c/data-science-bowl-2018 - -git_repo: https://github.com/bioimage-io/spec-bioimage-io/tree/main/example_specs/models/unet2d_nuclei_broad -tags: [unet2d, pytorch, nucleus, segmentation, dsb2018] -license: MIT - -documentation: README.md -covers: [cover0.png] -attachments: {} -timestamp: 2019-12-11T12:22:32Z # ISO 8601 - -inputs: - - name: raw - description: raw input - axes: bcyx # letters of axes in btczyx - data_type: float32 - data_range: [-.inf, .inf] - shape: [1, 1, 512, 512] - preprocessing: # list of preprocessing steps - - name: zero_mean_unit_variance # name of preprocessing step - kwargs: - mode: per_sample # mode in [fixed, per_dataset, per_sample] - axes: yx # subset of axes to normalize jointly, batch ('b') is not a valid axis key here! 
- -outputs: - - name: probability - description: probability in [0,1] - axes: bcyx - data_type: float32 - data_range: [-.inf, .inf] - halo: [0, 0, 32, 32] - shape: - reference_input: raw - scale: [1.0, 1.0, 1.0, 1.0] - offset: [0.0, 0.0, 0.0, 0.0] - -language: python -framework: pytorch -source: unet2d.py:UNet2d -sha256: cf42a6d86adeb4eb6e8e37b539a20e5413866b183bed88f4e2e26ad1639761ed -kwargs: {input_channels: 1, output_channels: 1} -dependencies: conda:environment.yaml - -test_inputs: [test_input.npy] -test_outputs: [test_output.npy] - -sample_inputs: [test_input.npy] -sample_outputs: [test_output.npy] - -weights: - pytorch_state_dict: - authors: - - name: "Constantin Pape;@bioimage-io" - affiliation: "EMBL Heidelberg" - orcid: "0000-0001-6562-7187" - sha256: e4d3885bccbe41cbf6c1d825f3cd2b707c7021ead5593156007e407a16b27cf2 - source: https://zenodo.org/record/3446812/files/unet2d_weights.torch - onnx: - sha256: f1f086d5e340f9d4d7001a1b62a2b835f9b87a2fb5452c4fe7d8cc821bdf539c - source: ./weights.onnx - opset_version: 12 - parent: https://zenodo.org/record/3446812/files/unet2d_weights.torch - pytorch_script: - sha256: 62fa1c39923bee7d58a192277e0dd58f2da9ee810662addadd0f44a3784d9210 - source: ./weights.pt - parent: https://zenodo.org/record/3446812/files/unet2d_weights.torch - -type: model -version: 0.1.3 diff --git a/example_specs/models/unet2d_nuclei_broad/rdf_v0_3_3.yaml b/example_specs/models/unet2d_nuclei_broad/rdf_v0_3_3.yaml deleted file mode 100644 index a5d18e1f8..000000000 --- a/example_specs/models/unet2d_nuclei_broad/rdf_v0_3_3.yaml +++ /dev/null @@ -1,87 +0,0 @@ -# TODO physical scale of the data -format_version: 0.3.3 - -name: UNet 2D Nuclei Broad -description: A 2d U-Net trained on the nuclei broad dataset. -authors: - - name: "Constantin Pape;@bioimage-io" - affiliation: "EMBL Heidelberg" - orcid: "0000-0001-6562-7187" - - name: "Fynn Beuttenmueller" - affiliation: "EMBL Heidelberg" - orcid: "0000-0002-8567-6389" - -# we allow for multiple citations. Each citation contains TEXT, DOI and URL. One of DOI or URL needs to be given. -cite: - - text: "Ronneberger, Olaf et al. U-net: Convolutional networks for biomedical image segmentation. MICCAI 2015." - doi: https://doi.org/10.1007/978-3-319-24574-4_28 - - text: "2018 Data Science Bowl" - url: https://www.kaggle.com/c/data-science-bowl-2018 - -git_repo: https://github.com/bioimage-io/spec-bioimage-io/tree/main/example_specs/models/unet2d_nuclei_broad -tags: [unet2d, pytorch, nucleus, segmentation, dsb2018] -license: MIT - -documentation: README.md -covers: [cover0.png] -attachments: {} -timestamp: 2019-12-11T12:22:32Z # ISO 8601 - -inputs: - - name: raw - description: raw input - axes: bcyx # letters of axes in btczyx - data_type: float32 - data_range: [-.inf, .inf] - shape: [1, 1, 512, 512] - preprocessing: # list of preprocessing steps - - name: zero_mean_unit_variance # name of preprocessing step - kwargs: - mode: per_sample # mode in [fixed, per_dataset, per_sample] - axes: yx # subset of axes to normalize jointly, batch ('b') is not a valid axis key here! 
- -outputs: - - name: probability - description: probability in [0,1] - axes: bcyx - data_type: float32 - data_range: [-.inf, .inf] - halo: [0, 0, 32, 32] - shape: - reference_tensor: raw - scale: [1.0, 1.0, 1.0, 1.0] - offset: [0, 0, 0, 0] - -language: python -framework: pytorch -source: unet2d.py:UNet2d -sha256: cf42a6d86adeb4eb6e8e37b539a20e5413866b183bed88f4e2e26ad1639761ed -kwargs: {input_channels: 1, output_channels: 1} -dependencies: conda:environment.yaml - -test_inputs: [test_input.npy] -test_outputs: [test_output.npy] - -sample_inputs: [test_input.npy] -sample_outputs: [test_output.npy] - -weights: - pytorch_state_dict: - authors: - - name: "Constantin Pape;@bioimage-io" - affiliation: "EMBL Heidelberg" - orcid: "0000-0001-6562-7187" - sha256: e4d3885bccbe41cbf6c1d825f3cd2b707c7021ead5593156007e407a16b27cf2 - source: https://zenodo.org/record/3446812/files/unet2d_weights.torch - onnx: - sha256: f1f086d5e340f9d4d7001a1b62a2b835f9b87a2fb5452c4fe7d8cc821bdf539c - source: weights.onnx - opset_version: 12 - parent: https://zenodo.org/record/3446812/files/unet2d_weights.torch - pytorch_script: - sha256: 62fa1c39923bee7d58a192277e0dd58f2da9ee810662addadd0f44a3784d9210 - source: weights.pt - parent: https://zenodo.org/record/3446812/files/unet2d_weights.torch - -type: model -version: 0.1.3 diff --git a/example_specs/models/unet2d_nuclei_broad/rdf_v0_3_6.yaml b/example_specs/models/unet2d_nuclei_broad/rdf_v0_3_6.yaml deleted file mode 100644 index a0fc6ff79..000000000 --- a/example_specs/models/unet2d_nuclei_broad/rdf_v0_3_6.yaml +++ /dev/null @@ -1,87 +0,0 @@ -# TODO physical scale of the data -format_version: 0.3.6 - -name: UNet 2D Nuclei Broad -description: A 2d U-Net trained on the nuclei broad dataset. -authors: - - name: "Constantin Pape;@bioimage-io" - affiliation: "EMBL Heidelberg" - orcid: "0000-0001-6562-7187" - - name: "Fynn Beuttenmueller" - affiliation: "EMBL Heidelberg" - orcid: "0000-0002-8567-6389" - -# we allow for multiple citations. Each citation contains TEXT, DOI and URL. One of DOI or URL needs to be given. -cite: - - text: "Ronneberger, Olaf et al. U-net: Convolutional networks for biomedical image segmentation. MICCAI 2015." - doi: https://doi.org/10.1007/978-3-319-24574-4_28 - - text: "2018 Data Science Bowl" - url: https://www.kaggle.com/c/data-science-bowl-2018 - -git_repo: https://github.com/bioimage-io/spec-bioimage-io/tree/main/example_specs/models/unet2d_nuclei_broad -tags: [unet2d, pytorch, nucleus, segmentation, dsb2018] -license: MIT - -documentation: README.md -covers: [cover0.png] -attachments: {} -timestamp: 2019-12-11T12:22:32Z # ISO 8601 - -inputs: - - name: raw - description: raw input - axes: bcyx # letters of axes in btczyx - data_type: float32 - data_range: [-.inf, .inf] - shape: [1, 1, 512, 512] - preprocessing: # list of preprocessing steps - - name: zero_mean_unit_variance # name of preprocessing step - kwargs: - mode: per_sample # mode in [fixed, per_dataset, per_sample] - axes: yx # subset of axes to normalize jointly, batch ('b') is not a valid axis key here! 
- -outputs: - - name: probability - description: probability in [0,1] - axes: bcyx - data_type: float32 - data_range: [-.inf, .inf] - halo: [0, 0, 32, 32] - shape: - reference_tensor: raw - scale: [1.0, 1.0, 1.0, 1.0] - offset: [0, 0, 0, 0] - -language: python -framework: pytorch -source: unet2d.py:UNet2d -sha256: cf42a6d86adeb4eb6e8e37b539a20e5413866b183bed88f4e2e26ad1639761ed -kwargs: {input_channels: 1, output_channels: 1} -dependencies: conda:environment.yaml - -test_inputs: [test_input.npy] -test_outputs: [test_output.npy] - -sample_inputs: [test_input.npy] -sample_outputs: [test_output.npy] - -weights: - pytorch_state_dict: - authors: - - name: "Constantin Pape;@bioimage-io" - affiliation: "EMBL Heidelberg" - orcid: "0000-0001-6562-7187" - sha256: e4d3885bccbe41cbf6c1d825f3cd2b707c7021ead5593156007e407a16b27cf2 - source: https://zenodo.org/record/3446812/files/unet2d_weights.torch - onnx: - sha256: f1f086d5e340f9d4d7001a1b62a2b835f9b87a2fb5452c4fe7d8cc821bdf539c - source: weights.onnx - opset_version: 12 - parent: https://zenodo.org/record/3446812/files/unet2d_weights.torch - pytorch_script: - sha256: 62fa1c39923bee7d58a192277e0dd58f2da9ee810662addadd0f44a3784d9210 - source: weights.pt - parent: https://zenodo.org/record/3446812/files/unet2d_weights.torch - -type: model -version: 0.1.3 diff --git a/example_specs/models/unet2d_nuclei_broad/test_input.npy b/example_specs/models/unet2d_nuclei_broad/test_input.npy deleted file mode 100644 index 228057f8d..000000000 Binary files a/example_specs/models/unet2d_nuclei_broad/test_input.npy and /dev/null differ diff --git a/example_specs/models/upsample_test_model/rdf.yaml b/example_specs/models/upsample_test_model/rdf.yaml deleted file mode 100644 index 66c975805..000000000 --- a/example_specs/models/upsample_test_model/rdf.yaml +++ /dev/null @@ -1,38 +0,0 @@ -authors: -- {name: William Patton} -cite: -- {doi: NA, text: Test} -covers: [] -description: test model for scaling -documentation: docs.md -format_version: 0.4.6 -inputs: -- axes: bcyx - data_range: [-.inf, .inf] - data_type: float64 - name: input - shape: - min: [1, 1, 5, 5] - step: [0, 0, 0, 0] -license: CC-BY-4.0 -name: test_scaling -outputs: -- axes: bcyx - data_range: [-.inf, .inf] - data_type: float64 - halo: [0, 0, 0, 0] - name: output - shape: - offset: [0.0, 0.0, 0.0, 0.0] - reference_tensor: input - scale: [1.0, 1.0, 4.0, 4.0] -tags: [scaling, test] -test_inputs: [test_input.npy] -test_outputs: [test_output.npy] -timestamp: '2022-10-23T0:0:0' -type: model -weights: - pytorch_state_dict: - architecture: upsample_model.py:Upscaler - architecture_sha256: 944f960311e1eedc42072fe264cb6b4e544f53cda3da9e6525621e5e3d4ba69b - source: weights.pt \ No newline at end of file diff --git a/example_use/load_model_and_create_your_own.ipynb b/example_use/load_model_and_create_your_own.ipynb new file mode 100644 index 000000000..3480a068d --- /dev/null +++ b/example_use/load_model_and_create_your_own.ipynb @@ -0,0 +1,435 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from bioimageio.spec.pretty_validation_errors import (\n", + " enable_pretty_validation_errors_in_ipynb,\n", + ")\n", + "\n", + "enable_pretty_validation_errors_in_ipynb()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Load and inspect a model description" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from bioimageio.spec import InvalidDescr, 
load_description\n", + "from bioimageio.spec.common import HttpUrl\n", + "from bioimageio.spec.model import ModelDescr\n", + "\n", + "# examples tested with this notebook\n", + "IMPARTIAL_SHRIMP = \"impartial-shrimp\"\n", + "PIONEERING_RHINO = \"pioneering-rhino\"\n", + "\n", + "# example_model_id = IMPARTIAL_SHRIMP\n", + "example_model_id = PIONEERING_RHINO\n", + "\n", + "# TODO: load bioimageio id from new S3 collection\n", + "if example_model_id == IMPARTIAL_SHRIMP: # pyright: ignore[reportUnnecessaryComparison]\n", + " url = \"https://bioimage-io.github.io/collection-bioimage-io/rdfs/10.5281/zenodo.5874741/5874742/rdf.yaml\"\n", + "elif example_model_id == PIONEERING_RHINO:\n", + " url = \"https://bioimage-io.github.io/collection-bioimage-io/rdfs/10.5281/zenodo.6334383/7805067/rdf.yaml\"\n", + "else:\n", + " raise NotImplementedError(example_model_id)\n", + "\n", + "model = load_description(url, format_version=\"latest\")\n", + "if isinstance(model, InvalidDescr):\n", + " raise ValueError(f\"Failed to load {example_model_id}:\\n{model.validation_summary.format()}\")\n", + "elif not isinstance(model, ModelDescr):\n", + " raise ValueError(\"This notebook expects a model description\")\n", + "\n", + "print(f\"loaded '{model.name}'\")\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "assert model.validation_summary is not None\n", + "print(model.validation_summary.format())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import matplotlib.pyplot as plt\n", + "from imageio.v3 import imread\n", + "\n", + "from bioimageio.spec.utils import download\n", + "\n", + "for cover in model.covers:\n", + " cover_data = imread(download(cover).path)\n", + " plt.imshow(cover_data)\n", + " plt.xticks([])\n", + " plt.yticks([])\n", + " plt.show()\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "the following parts assume we only have a single input and a single output tensor" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "assert len(model.inputs) == 1\n", + "assert len(model.outputs) == 1\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from pprint import pprint\n", + "\n", + "import numpy as np\n", + "\n", + "pprint(model.inputs[0].axes)\n", + "test_input_path = model.inputs[0].test_tensor.download().path\n", + "test_input_array = np.load(test_input_path)\n", + "print(test_input_array.shape)\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "pprint(model.outputs[0].axes)\n", + "test_output_path = model.outputs[0].test_tensor.download().path\n", + "test_output_array = np.load(test_output_path)\n", + "print(test_output_array.shape)\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "assert model.weights.pytorch_state_dict is not None\n", + "pytorch_state_dict_weights_src = model.weights.pytorch_state_dict.download().path\n", + "print(pytorch_state_dict_weights_src)\n", + "assert model.weights.torchscript is not None\n", + "torchscript_weights_src = model.weights.torchscript.download().path\n", + "print(torchscript_weights_src)\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from bioimageio.spec.model.v0_5 import 
ArchitectureFromFileDescr\n", + "from bioimageio.spec.utils import download\n", + "\n", + "assert model.weights.pytorch_state_dict is not None\n", + "arch = model.weights.pytorch_state_dict.architecture\n", + "assert isinstance(arch, ArchitectureFromFileDescr)\n", + "print(f\"Model architecture given by '{arch.callable}' in {arch.source}\")\n", + "print(\"architecture key word arguments:\")\n", + "pprint(arch.kwargs)\n", + "arch_file_path = download(arch.source, sha256=arch.sha256).path\n", + "arch_file_sha256 = arch.sha256\n", + "arch_name = arch.callable\n", + "arch_kwargs = arch.kwargs\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Create a model description\n", + "\n", + "Let's recreate a model based on parts of the loaded model description from above!\n", + "\n", + "Creating a model description in Python means creating a `ModelDescr` object.\n", + "Without any input data this will raise a `ValidationError` listing missing fields that are required:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from bioimageio.spec.common import ValidationError\n", + "from bioimageio.spec.model.v0_5 import ModelDescr\n", + "\n", + "try:\n", + " my_model_descr = ModelDescr() # type: ignore\n", + "except ValidationError as e:\n", + " print(e)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "to populate a `ModelDescr` appropriately we create the required subparts.\n", + "Let's start with the inputs:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from bioimageio.spec.model.v0_5 import (\n", + " Author,\n", + " AxisId,\n", + " BatchAxis,\n", + " ChannelAxis,\n", + " CiteEntry,\n", + " Doi,\n", + " FileDescr,\n", + " Identifier,\n", + " InputTensorDescr,\n", + " IntervalOrRatioDataDescr,\n", + " ModelDescr,\n", + " OutputTensorDescr,\n", + " ParameterizedSize,\n", + " PytorchStateDictWeightsDescr,\n", + " SizeReference,\n", + " SpaceInputAxis,\n", + " SpaceOutputAxis,\n", + " TensorId,\n", + " TorchscriptWeightsDescr,\n", + " WeightsDescr,\n", + ")\n", + "\n", + "input_axes = [\n", + " BatchAxis(),\n", + " ChannelAxis(channel_names=[Identifier(\"raw\")])]\n", + "if example_model_id == \"impartial-shrimp\":\n", + " input_axes += [\n", + " SpaceInputAxis(id=AxisId(\"z\"), size=ParameterizedSize(min=16, step=8)),\n", + " SpaceInputAxis(id=AxisId('y'), size=ParameterizedSize(min=144, step=72)),\n", + " SpaceInputAxis(id=AxisId('x'), size=ParameterizedSize(min=144, step=72)),\n", + " ]\n", + " data_descr = IntervalOrRatioDataDescr(type=\"uint8\")\n", + "elif example_model_id == \"pioneering-rhino\":\n", + " input_axes += [\n", + " SpaceInputAxis(id=AxisId('y'), size=ParameterizedSize(min=256, step=8)),\n", + " SpaceInputAxis(id=AxisId('x'), size=ParameterizedSize(min=256, step=8)),\n", + " ]\n", + " data_descr = IntervalOrRatioDataDescr()\n", + "else:\n", + " raise NotImplementedError(f\"Recreating inputs for {example_model_id} is not implemented\")\n", + "\n", + "input_descr = InputTensorDescr(id=TensorId(\"raw\"), axes=input_axes, test_tensor=FileDescr(source=test_input_path), data=data_descr)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "... 
and describe the outputs very similarly:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "output_axes = [\n", + " BatchAxis(),\n", + " ChannelAxis(channel_names=[\"membrane\"])]\n", + "if example_model_id == \"impartial-shrimp\":\n", + " output_axes += [\n", + " SpaceInputAxis(id=AxisId(\"z\"), size=ParameterizedSize(min=16, step=8)), # implicitly same size as raw.z as it is parametrized the same.\n", + " SpaceInputAxis(id=AxisId('y'), size=ParameterizedSize(min=144, step=72)),\n", + " SpaceInputAxis(id=AxisId('x'), size=ParameterizedSize(min=144, step=72))\n", + " ]\n", + "elif example_model_id == \"pioneering-rhino\":\n", + " output_axes += [\n", + " SpaceOutputAxis(id=AxisId(\"y\"), size=SizeReference(tensor_id=TensorId('raw'), axis_id=AxisId('y'))), # explicitly same size as raw.y\n", + " SpaceOutputAxis(id=AxisId(\"x\"), size=SizeReference(tensor_id=TensorId('raw'), axis_id=AxisId('x'))),\n", + " ]\n", + "else:\n", + " raise NotImplementedError(f\"Recreating outputs for {example_model_id} is not implemented\")\n", + "\n", + "output_descr = OutputTensorDescr(id=TensorId(\"prob\"), axes=output_axes, test_tensor=FileDescr(source=test_output_path))\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "... and finish with describing the architecutre needed for the pytorch state dict weights:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from bioimageio.spec.model.v0_5 import ArchitectureFromFileDescr, Version\n", + "\n", + "try:\n", + " import torch\n", + "except ImportError:\n", + " pytorch_version = Version(\"1.15\")\n", + "else:\n", + " pytorch_version = Version(torch.__version__)\n", + "\n", + "pytorch_architecture = ArchitectureFromFileDescr(source=arch_file_path, sha256=arch_file_sha256, callable=arch_name, kwargs=arch_kwargs)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "now, we are ready to create a new model:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "my_model_descr = ModelDescr(\n", + " name=\"My cool model\",\n", + " description=\"A test model for demonstration purposes only\",\n", + " authors=[Author(name=\"me\", affiliation=\"my institute\", github_user=\"bioimageiobot\")], # change github_user to your GitHub account name\n", + " cite=[CiteEntry(text=\"for model training see my paper\", doi=Doi(\"10.1234something\"))],\n", + " license=\"MIT\",\n", + " documentation=HttpUrl(\"https://raw.githubusercontent.com/bioimage-io/spec-bioimage-io/main/README.md\"),\n", + " git_repo=HttpUrl(\"https://github.com/bioimage-io/spec-bioimage-io\"), # change to repo where your model is developed\n", + " inputs=[input_descr],\n", + " outputs=[output_descr],\n", + " weights=WeightsDescr(\n", + " pytorch_state_dict=PytorchStateDictWeightsDescr(\n", + " source=pytorch_state_dict_weights_src,\n", + " architecture=pytorch_architecture,\n", + " pytorch_version=pytorch_version\n", + " ),\n", + " torchscript=TorchscriptWeightsDescr(\n", + " source=torchscript_weights_src,\n", + " pytorch_version=pytorch_version,\n", + " parent=\"pytorch_state_dict\", # these weights were converted from the pytorch_state_dict weights ones.\n", + " ),\n", + " ),\n", + " )\n", + "\n", + "print(\"created '{my_model_descr.name}'\")\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "some optional fields were filed with default values, 
e.g. as we did not specify `covers`, a default visualization of the test inputs and test outputs was used:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "for cover in my_model_descr.covers:\n", + " plt.imshow(imread(cover))\n", + " plt.xticks([])\n", + " plt.yticks([])\n", + " plt.show()\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## test your model" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from bioimageio.core import test_model\n", + "\n", + "summary = test_model(my_model_descr)\n", + "print(summary.format())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "side note: the validation summary is also available as a property" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "assert summary == my_model_descr.validation_summary" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "bio38", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.17" + }, + "orig_nbformat": 4 + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/pyproject.toml b/pyproject.toml index 4e42573ba..23220a3c3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,45 @@ [tool.black] -line-length = 120 -target-version = ['py38'] +line-length = 88 +target-version = ["py38", "py39", "py310", "py311"] +preview = true + +[tool.pyright] +exclude = ["**/node_modules", "**/__pycache__", "tests/old_*", "tests/cache"] +include = ["bioimageio", "scripts", "tests"] +pythonPlatform = "All" +pythonVersion = "3.8" +reportDuplicateImport = "error" +reportImplicitStringConcatenation = "error" +reportImportCycles = true +reportIncompatibleMethodOverride = true +reportMatchNotExhaustive = "error" +reportMissingSuperCall = "error" +reportMissingTypeArgument = true +reportMissingTypeStubs = "warning" +reportPropertyTypeMismatch = true +reportUninitializedInstanceVariable = "error" +reportUnknownMemberType = false +reportUnnecessaryIsInstance = false +reportUnnecessaryTypeIgnoreComment = "error" +reportUnsupportedDunderAll = "error" +reportUnusedCallResult = "error" +reportUnusedClass = "error" +reportUnusedExpression = "error" +reportUnusedFunction = "error" +reportUnusedImport = "error" +reportUnusedVariable = "error" +reportWildcardImportFromLibrary = "error" +strictDictionaryInference = true +strictListInference = true +strictSetInference = true +typeCheckingMode = "strict" +useLibraryCodeForTypes = true + +[tool.pytest.ini_options] +addopts = "-n auto --capture=no --failed-first" # --doctest-modules +testpaths = ["bioimageio/spec", "tests", "scripts"] + +[tool.ruff] +line-length = 88 +include = ["*.py", "*.pyi", "**/pyproject.toml", "*.ipynb"] +target-version = "py38" diff --git a/pytest.ini b/pytest.ini deleted file mode 100644 index 0549b1762..000000000 --- a/pytest.ini +++ /dev/null @@ -1,3 +0,0 @@ -[pytest] -add_opts = -s --doctest-modules -testpaths = tests diff --git a/scripts/compare_yaml_syntax.py b/scripts/compare_yaml_syntax.py index 6eb233086..5d546da61 100644 --- a/scripts/compare_yaml_syntax.py 
+++ b/scripts/compare_yaml_syntax.py @@ -3,11 +3,11 @@ from pathlib import Path try: - from ruamel.yaml import YAML + from ruyaml import YAML ruamel_yaml = YAML(typ="safe") except ImportError: - raise RuntimeError("Cannot compare yaml syntax without the ruamel.yaml package") + raise RuntimeError("Cannot compare yaml syntax without the ruyaml package") try: import yaml as _pyyaml_yaml @@ -16,8 +16,7 @@ class pyyaml_yaml: @staticmethod def load(path: Path): with path.open() as f: - return _pyyaml_yaml.load(f, None) # fmt: off - + return _pyyaml_yaml.load(f, _pyyaml_yaml.SafeLoader) except ImportError: raise RuntimeError("Cannot compare yaml syntax without the pyyaml package") @@ -25,32 +24,41 @@ def load(path: Path): def parse_args(): p = ArgumentParser( - description="Check for differences between yaml 1.1 (using PyYAML) and yaml 1.2 syntax (using ruamel.yaml)." + description=( + "Check for differences between yaml 1.1 (using PyYAML) and yaml 1.2 syntax" + + " (using ruyaml)." + ) ) - p.add_argument( + _ = p.add_argument( "--resource_description_path", type=Path, - default=Path(__file__).parent / "../example_specs/models/unet2d_nuclei_broad/rdf.yaml", + default=Path(__file__).parent + / "../example_specs/models/unet2d_nuclei_broad/rdf.yaml", ) args = p.parse_args() return args def main(resource_description_path: Path): - pyyaml = pyyaml_yaml.load(resource_description_path) assert isinstance(pyyaml, dict) ruamel = ruamel_yaml.load(resource_description_path) assert isinstance(ruamel, dict) - diff = {key: (value, ruamel[key]) for key, value in pyyaml.items() if value != ruamel[key]} + diff = {key: (value, ruamel[key]) for key, value in pyyaml.items() if value != ruamel[key]} # type: ignore if diff: - print(f"Found differences between yaml syntax 1.1/1.2 for {resource_description_path}:") - print(diff) + print( + "Found differences between yaml syntax 1.1/1.2 for" + + f" {resource_description_path}:" + ) + print(diff) # type: ignore else: - print(f"No differences found between yaml syntax 1.1/1.2 for {resource_description_path}:") + print( + "No differences found between yaml syntax 1.1/1.2 for" + + f" {resource_description_path}:" + ) - return len(diff) + return len(diff) # type: ignore if __name__ == "__main__": diff --git a/scripts/generate_dtype_limits.py b/scripts/generate_dtype_limits.py new file mode 100644 index 000000000..8e5c1e063 --- /dev/null +++ b/scripts/generate_dtype_limits.py @@ -0,0 +1,18 @@ +from pprint import pprint +from typing import Dict, Tuple, Union + +import numpy as np + +dtype_limits: Dict[str, Tuple[Union[float, int], Union[float, int]]] = {} + +for dtype in ["float32", "float64"]: + info = np.finfo(dtype) + dtype_limits[dtype] = (float(info.min), float(info.max)) + + +for dtype in ["uint8", "int8", "uint16", "int16", "uint32", "int32", "uint64", "int64"]: + info = np.iinfo(dtype) + dtype_limits[dtype] = (info.min, info.max) + +if __name__ == "__main__": + pprint(dtype_limits) diff --git a/scripts/generate_json_schemas.py b/scripts/generate_json_schemas.py new file mode 100644 index 000000000..f81b343d7 --- /dev/null +++ b/scripts/generate_json_schemas.py @@ -0,0 +1,89 @@ +import json +import sys +from argparse import ArgumentParser +from pathlib import Path +from tempfile import TemporaryDirectory +from typing import Any, Dict, Literal + +from deepdiff import DeepDiff +from pydantic import ConfigDict, TypeAdapter +from typing_extensions import assert_never + +import bioimageio.spec + + +def export_json_schemas_from_type(folder: Path, type_: Any, *, title: str): + 
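"""Export the JSON Schema for `type_` twice: once named after the current bioimageio.spec version and once as 'latest'.""" # descriptive docstring added for clarity; summarizes what the function body below does
+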
adapter = TypeAdapter( + type_, + config=ConfigDict(title=title), + ) + schema = adapter.json_schema() + for version in ("v" + "-".join(bioimageio.spec.__version__.split(".")), "latest"): + write_schema(schema, folder / f"bioimageio_schema_{version}.json") + + +def write_schema(schema: Dict[str, Any], path: Path): + with path.open("w") as f: + json.dump(schema, f, indent=4) + + print(f"written `{path}`") + + +def export_json_schemas(dist: Path): + assert dist.exists() + + export_json_schemas_from_type( + dist, + bioimageio.spec.SpecificResourceDescr, + title=f"bioimage.io resource description {bioimageio.spec.__version__}", + ) + + +def parse_args(): + p = ArgumentParser(description="script that generates bioimageio json schemas") + _ = p.add_argument( + "command", choices=["check", "generate"], nargs="?", default="generate" + ) + _ = p.add_argument( + "--dist", nargs="?", default=str((Path(__file__).parent / "../dist").resolve()) + ) + args = p.parse_args() + return args + + +def generate_json_schemas(dist: Path, command: Literal["check", "generate"]): + dist.mkdir(exist_ok=True) + if command == "generate": + export_json_schemas(dist) + elif command == "check": + existing_schemas = { + p.name: p for p in Path(dist).glob("bioimageio_schema_*.json") + } + with TemporaryDirectory() as tmp_name: + dist = Path(tmp_name) + export_json_schemas(dist) + generated_schemas = { + p.name: p for p in dist.glob("bioimageio_schema_*.json") + } + missing_generated = set(existing_schemas).difference(set(generated_schemas)) + assert not missing_generated, missing_generated + generated_in_addition = set(generated_schemas).difference( + set(existing_schemas) + ) + assert not generated_in_addition, generated_in_addition + for name, existing_p in existing_schemas.items(): + with existing_p.open() as f: + existing = json.load(f) + + with generated_schemas[name].open() as f: + generated = json.load(f) + + diff: Any = DeepDiff(existing, generated) + assert not diff, diff.pretty() + else: + assert_never(command) + + +if __name__ == "__main__": + args = parse_args() + sys.exit(generate_json_schemas(Path(args.dist), args.command)) diff --git a/scripts/generate_json_specs.py b/scripts/generate_json_specs.py deleted file mode 100644 index d0414afc6..000000000 --- a/scripts/generate_json_specs.py +++ /dev/null @@ -1,52 +0,0 @@ -import json -from pathlib import Path - -from marshmallow_jsonschema import JSONSchema - -import bioimageio.spec - -try: - from typing import get_args -except ImportError: - from typing_extensions import get_args # type: ignore - - -def export_json_schema_from_schema(folder: Path, spec): - type_or_version = spec.__name__.split(".")[-1] - format_version_wo_patch = "_".join(spec.format_version.split(".")[:2]) - if type_or_version[1:] == format_version_wo_patch: - type_ = spec.__name__.split(".")[-2] - else: - format_version_wo_patch = "latest" - type_ = type_or_version - - path = folder / f"{type_}_spec_{format_version_wo_patch}.json" - - if type_ == "rdf": - type_ = "RDF" - else: - type_ = type_.title() - - with path.open("w") as f: - json_schema = JSONSchema().dump(getattr(spec.schema, type_)()) - json.dump(json_schema, f, indent=4, sort_keys=True) - - -if __name__ == "__main__": - dist = Path(__file__).parent / "../dist" - dist.mkdir(exist_ok=True) - - import bioimageio.spec.rdf.v0_2 - import bioimageio.spec.collection.v0_2 - import bioimageio.spec.dataset.v0_2 - import bioimageio.spec.model.v0_3 - import bioimageio.spec.model.v0_4 - - export_json_schema_from_schema(dist, bioimageio.spec.rdf) - 
export_json_schema_from_schema(dist, bioimageio.spec.rdf.v0_2) - export_json_schema_from_schema(dist, bioimageio.spec.collection) - export_json_schema_from_schema(dist, bioimageio.spec.collection.v0_2) - export_json_schema_from_schema(dist, bioimageio.spec.dataset.v0_2) - export_json_schema_from_schema(dist, bioimageio.spec.model) - export_json_schema_from_schema(dist, bioimageio.spec.model.v0_3) - export_json_schema_from_schema(dist, bioimageio.spec.model.v0_4) diff --git a/scripts/generate_passthrough_modules.py b/scripts/generate_passthrough_modules.py deleted file mode 100644 index f03360cc8..000000000 --- a/scripts/generate_passthrough_modules.py +++ /dev/null @@ -1,160 +0,0 @@ -import os -import re -import sys -from argparse import ArgumentParser -from dataclasses import dataclass, field -from pathlib import Path -from typing import Iterable - -_script_path = Path(__file__).parent - -autogen_header = "# Auto-generated by generate_passthrough_modules.py - do not modify\n\n" -autogen_text = autogen_header + "from .{spec_version}.{stem} import *\n" - -version_module_pattern = r"v(?P\d+)_(?P\d+)" - - -@dataclass -class Config: - target_rdf: str - target_version: str - main_module_path: Path = field(init=False) - versioned_module_path: Path = field(init=False) - - def __post_init__(self): - self.main_module_path = (_script_path.parent / "bioimageio" / "spec" / self.target_rdf).resolve() - self.versioned_module_path = ( - _script_path.parent / "bioimageio" / "spec" / self.target_rdf / self.target_version - ).resolve() - - -def remove_autogen_mods(config: Config): - for f in config.main_module_path.glob("*.py"): - mod_txt = f.read_text() - m = re.match(autogen_header + rf"from \.{version_module_pattern}\.", mod_txt) - if m: - print(f"Deleting {f} (linked version {m['major']}.{m['minor']})") - f.unlink() - - -def updated_init_content(config: Config) -> str: - restr = "# autogen: start\n.*# autogen: stop" - - init_file = config.main_module_path / "__init__.py" - assert init_file.exists() - versioned_init = config.versioned_module_path / "__init__.py" - module_init = config.main_module_path / "__init__.py" - vx_init = module_init.read_text() - if not re.findall(restr, vx_init, flags=re.DOTALL): - raise RuntimeError( - f"Could not find autogen markers in {module_init}. Expected to find\n\n# autogen: start\n...\n# autogen: stop\n\nin your __init__." 
- ) - return re.sub(restr, f"# autogen: start\n{versioned_init.read_text()}\n# autogen: stop", vx_init, flags=re.DOTALL) - - -def update_init(config: Config): - module_init = config.main_module_path / "__init__.py" - module_init.write_text(updated_init_content(config)) - - -def add_autogen_mods(config: Config): - for f in config.versioned_module_path.glob("*.py"): - if f.name.startswith("__"): - continue - - tmp = config.main_module_path / f.name - tmp.write_text(autogen_text.format(spec_version=config.target_version, stem=f.stem)) - - -def is_valid_generated_module(module_file: Path, spec_version: str): - module_txt = module_file.read_text() - if module_txt == autogen_text.format(spec_version=spec_version, stem=module_file.stem): - return True - - return False - - -def check_main(config) -> int: - print(f"Checking `bioimageio.spec` modules to link against {config.target_version}.") - print(f"Assuming module location {config.main_module_path}, with target spec in {config.versioned_module_path}.") - - ret = 0 - for f in config.versioned_module_path.glob("*.py"): - if f.name == "__init__.py": - continue - if not (config.main_module_path / f.name).exists() or not is_valid_generated_module( - config.main_module_path / f.name, config.target_version - ): - ret += 1 - print(f"Could not find {config.main_module_path / f.name}") - - if ret == 0: - print("All seems fine.") - else: - print("Issues found, try regenerating.") - return ret - - -def generate_main(config: Config) -> int: - print(f"Generating `bioimageio.spec` modules to link against {config.target_version}.") - - remove_autogen_mods(config) - add_autogen_mods(config) - update_init(config) - - return 0 - - -def parse_args(): - p = ArgumentParser( - description=( - "script that generates Python module files in bioimageio.spec that " - "'link' to a certain spec version. 
The generated modules act as pass" - "-through, via `from .vX_Y import *" - ) - ) - p.add_argument("command", choices=["check", "generate"]) - target_choices = ["rdf", "collection", "model", "dataset"] - p.add_argument( - "--target-rdf", - choices=target_choices, - default=list(target_choices), - help="RDF submodules for which the latest format version is made available in `bioimageio.spec.`.", - type=str, - action="append", - ) - - args = p.parse_args() - return args - - -def get_ordered_version_submodules(target_rdf: str): - matches: Iterable[re.Match] = filter( - None, - [ - re.fullmatch(version_module_pattern, f.name) - for f in os.scandir(_script_path.parent / "bioimageio" / "spec" / target_rdf) - if f.is_dir() - ], - ) - matches = sorted(matches, key=lambda m: (m["major"], m["minor"])) - return [m.string for m in matches] - - -def main(): - args = parse_args() - code = 0 - for target_rdf in args.target_rdf: - config = Config(target_rdf=target_rdf, target_version=get_ordered_version_submodules(target_rdf)[-1]) - if args.command == "check": - code += check_main(config) - elif args.command == "generate": - code += generate_main(config) - else: - raise NotImplementedError(args.command) - - return code - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/scripts/generate_processing_docs.py b/scripts/generate_processing_docs.py deleted file mode 100644 index 44ebb7d9a..000000000 --- a/scripts/generate_processing_docs.py +++ /dev/null @@ -1,130 +0,0 @@ -import dataclasses -import inspect -from pathlib import Path -from typing import List, Tuple, Type - -import bioimageio.spec.model -from bioimageio.spec.shared.schema import SharedProcessingSchema -from bioimageio.spec.shared.utils import get_ref_url, resolve_bioimageio_descrcription, snake_case_to_camel_case - -REFERENCE_IMPLEMENTATIONS_SOURCE = "https://github.com/bioimage-io/core-bioimage-io-python/blob/main/bioimageio/core/prediction_pipeline/_processing.py" - - -@dataclasses.dataclass -class Kwarg: - name: str - optional: bool - description: str - - -@dataclasses.dataclass -class ProcessingDocNode: - name: str - description: str - kwargs: List[Kwarg] - reference_implemation: str - - -@dataclasses.dataclass -class PreprocessingDocNode(ProcessingDocNode): - prefix = "pre" - - -@dataclasses.dataclass -class PostprocessingDocNode(ProcessingDocNode): - prefix = "post" - - -def get_docs(schema) -> Tuple[List[PreprocessingDocNode], List[PostprocessingDocNode]]: - """retrieve docs for pre- and postprocessing from schema definitions - - using that pre- and postprocessings are defined as member classes that inherit from SharedProcessingSchema - """ - - def get_kwargs_doc(Sch: Type[SharedProcessingSchema]) -> List[Kwarg]: - return sorted( - [ - Kwarg( - name=name, - optional=not f.required or bool(f.missing), - description=resolve_bioimageio_descrcription(f.bioimageio_description), - ) - for name, f in Sch().fields.items() - ], - key=lambda kw: (kw.optional, kw.name), - ) - - preps = [ - PreprocessingDocNode( - name=name, - description=resolve_bioimageio_descrcription(member.bioimageio_description), - kwargs=get_kwargs_doc(member), - reference_implemation=get_ref_url( - "class", snake_case_to_camel_case(name), REFERENCE_IMPLEMENTATIONS_SOURCE - ), - ) - for name, member in inspect.getmembers(schema.Preprocessing) - if inspect.isclass(member) and issubclass(member, SharedProcessingSchema) - ] - posts = [ - PostprocessingDocNode( - name=name, - description=resolve_bioimageio_descrcription(member.bioimageio_description), - 
kwargs=get_kwargs_doc(member), - reference_implemation=get_ref_url( - "class", snake_case_to_camel_case(name), REFERENCE_IMPLEMENTATIONS_SOURCE - ), - ) - for name, member in inspect.getmembers(schema.Postprocessing) - if inspect.isclass(member) and issubclass(member, SharedProcessingSchema) - ] - return preps, posts - - -def markdown_from_docs(doc_nodes: List[ProcessingDocNode], title: str, description: str): - md = f"# {title}\n{description}\n" - - for doc in doc_nodes: - md += f"### `{doc.name}`\n{doc.description}\n" - if doc.kwargs: - md += f"- key word arguments:\n" - for kwarg in doc.kwargs: - md += f" - `{'[' if kwarg.optional else ''}{kwarg.name}{']' if kwarg.optional else ''}` {kwarg.description}\n" - md += f"- reference implementation: {doc.reference_implemation}\n" - - return md - - -def export_markdown_docs(folder: Path, spec) -> None: - model_or_version = spec.__name__.split(".")[-1] - format_version_wo_patch = ".".join(spec.format_version.split(".")[:2]) - if model_or_version == "model": - format_version_file_name = "latest" - else: - format_version_file_name = format_version_wo_patch.replace(".", "_") - - for docs in get_docs(spec.schema): - assert isinstance(docs, list) - prefix = docs[0].prefix - md = markdown_from_docs( - docs, - title=f"{prefix.title()}processing operations in model spec {format_version_wo_patch}", - description=( - f"The supported operations that are valid in {prefix}processing. " - "IMPORTANT: these operations must return float32 tensors, so that their output can be consumed by the " - "models." - ), - ) - path = folder / f"{prefix}processing_spec_{format_version_file_name}.md" - path.write_text(md, encoding="utf-8") - - -if __name__ == "__main__": - import bioimageio.spec.model.v0_3 - - dist = Path(__file__).parent / "../dist" - dist.mkdir(exist_ok=True) - - export_markdown_docs(dist, bioimageio.spec.model.v0_4) - export_markdown_docs(dist, bioimageio.spec.model.v0_3) - export_markdown_docs(dist, bioimageio.spec.model) diff --git a/scripts/generate_rdf_docs.py b/scripts/generate_rdf_docs.py deleted file mode 100644 index d9c5e6525..000000000 --- a/scripts/generate_rdf_docs.py +++ /dev/null @@ -1,192 +0,0 @@ -import dataclasses -import inspect -import typing -from pathlib import Path - -import bioimageio.spec.rdf -from bioimageio.spec.shared import fields - -try: - from typing import get_args -except ImportError: - from typing_extensions import get_args # type: ignore - - -@dataclasses.dataclass -class DocNode: - type_name: str - short_description: str - description: str - sub_docs: typing.List[typing.Tuple[str, "DocNode"]] - details: typing.List["DocNode"] - many: bool # expecting a list of the described sub spec - optional: bool - maybe_optional: bool - - def __post_init__(self): - assert not (self.sub_docs and self.details) - - -def doc_from_schema(obj, spec) -> DocNode: - if obj is None: - return DocNode( - type_name="Any", - short_description="", - description="", - sub_docs=[], - details=[], - many=False, - optional=False, - maybe_optional=False, - ) - elif isinstance(obj, fields.Nested): - type_name = obj.type_name - many = obj.many - short_description = obj.short_bioimageio_description - description = obj.bioimageio_description - maybe_required = obj.bioimageio_maybe_required - required = obj.required - obj = obj.nested - else: - type_name = "" - short_description = obj.short_bioimageio_description - description = obj.bioimageio_description - many = False - maybe_required = False - required = True - - if callable(description): - description = 
description() - - if callable(short_description): - short_description = short_description() - - details = [] - sub_docs = [] - if inspect.isclass(obj) and issubclass(obj, spec.schema.SharedBioImageIOSchema): - obj = obj() - - if isinstance(obj, spec.schema.SharedBioImageIOSchema): - - def sort_key(name_and_nested_field): - name, nested_field = name_and_nested_field - if nested_field.bioimageio_description_order is None: - manual_order = "" - else: - manual_order = f"{nested_field.bioimageio_description_order:09}" - - return f"{manual_order}{int(not nested_field.required)}{name}" - - sub_fields = sorted(obj.fields.items(), key=sort_key) - sub_docs = [(name, doc_from_schema(nested_field, spec)) for name, nested_field in sub_fields] - else: - type_name += obj.type_name - required = obj.required - maybe_required = obj.bioimageio_maybe_required - if isinstance(obj, fields.Union): - details = [doc_from_schema(opt, spec) for opt in obj._candidate_fields] - elif isinstance(obj, fields.Dict): - details = [doc_from_schema(obj.key_field, spec), doc_from_schema(obj.value_field, spec)] - elif isinstance(obj, fields.List): - details = [doc_from_schema(obj.inner, spec)] - else: - assert isinstance(obj, fields.DocumentedField), (type(obj), obj) - - return DocNode( - type_name=type_name, - short_description=short_description, - description=description, - sub_docs=[(name, d) for name, d in sub_docs if d.description or d.sub_docs or d.details], - details=[d for d in details if d.description or d.sub_docs or d.details], - many=many, - optional=not required, - maybe_optional=maybe_required, - ) - - -def markdown_from_doc( - doc: DocNode, parent_names: typing.Sequence[str] = tuple(), neither_opt_nor_req: bool = False, indent_lvl: int = 0 -): - if doc.sub_docs: - sub_docs = [(name, sdn) for name, sdn in doc.sub_docs] - enumerate_symbol: typing.Optional[str] = "*" - elif doc.details: - sub_docs = [("", sdn) for sdn in doc.details] - enumerate_symbol = "1." 
- else: - sub_docs = [] - enumerate_symbol = None - - n_o_n_r = neither_opt_nor_req or doc.type_name.startswith("List") or doc.type_name.startswith("Dict") - sub_doc = "" - if not doc.short_description: - for name, sdn in sub_docs: - field_path = [n for n in [*parent_names, name] if n] - assert isinstance(name, str), name # earlier version allowed DocNode here - name = f'`{name}`' if name else "" - entry = markdown_from_doc(sdn, field_path, neither_opt_nor_req=n_o_n_r, indent_lvl=indent_lvl + 1) - if entry: - sub_doc += f"{enumerate_symbol} {name} {entry}" - - if doc.type_name: - opt = ( - "" - if neither_opt_nor_req - else "optional* " - if doc.maybe_optional - else "optional " - if doc.optional - else "required " - ) - type_name = f"_({opt}{doc.type_name})_ " - else: - type_name = "" - - md_doc = f"{type_name}{doc.short_description or doc.description}\n{sub_doc}" - indent = " " - if indent_lvl: - md_doc = f"\n{indent}".join(md_doc.strip().split("\n")) - - return md_doc + "\n" - - -def export_markdown_doc(folder: Path, spec) -> None: - type_or_version = spec.__name__.split(".")[-1] - format_version_wo_patch = "_".join(spec.format_version.split(".")[:2]) - if type_or_version[1:] == format_version_wo_patch: - type_ = spec.__name__.split(".")[-2] - else: - format_version_wo_patch = "latest" - type_ = type_or_version - - path = folder / f"{type_}_spec_{format_version_wo_patch}.md" - - if type_ == "rdf": - type_ = "RDF" - else: - type_ = type_.title() - - doc = doc_from_schema(getattr(spec.schema, type_)(), spec) - md_doc = markdown_from_doc(doc) - path.write_text(md_doc, encoding="utf-8") - - -if __name__ == "__main__": - import bioimageio.spec.collection.v0_2 - import bioimageio.spec.dataset.v0_2 - import bioimageio.spec.model.v0_3 - import bioimageio.spec.model.v0_4 - import bioimageio.spec.rdf.v0_2 - - dist = Path(__file__).parent / "../dist" - dist.mkdir(exist_ok=True) - - export_markdown_doc(dist, bioimageio.spec.collection) - export_markdown_doc(dist, bioimageio.spec.collection.v0_2) - export_markdown_doc(dist, bioimageio.spec.dataset) - export_markdown_doc(dist, bioimageio.spec.dataset.v0_2) - export_markdown_doc(dist, bioimageio.spec.model) - export_markdown_doc(dist, bioimageio.spec.model.v0_3) - export_markdown_doc(dist, bioimageio.spec.model.v0_4) - export_markdown_doc(dist, bioimageio.spec.rdf) - export_markdown_doc(dist, bioimageio.spec.rdf.v0_2) diff --git a/scripts/generate_spec_documentation.py b/scripts/generate_spec_documentation.py new file mode 100644 index 000000000..1f2ac940e --- /dev/null +++ b/scripts/generate_spec_documentation.py @@ -0,0 +1,537 @@ +from __future__ import annotations + +import shutil +from argparse import ArgumentParser +from collections import OrderedDict +from dataclasses import dataclass, field +from pathlib import Path +from pprint import pformat +from types import ModuleType +from typing import Any, Dict, Iterator, List, Optional, Tuple, Type, get_args + +from pydantic.alias_generators import to_pascal, to_snake +from pydantic.fields import FieldInfo +from pydantic_core import PydanticUndefined + +from bioimageio.spec import ( + ResourceDescr, + application, + collection, + dataset, + generic, + model, + notebook, +) +from bioimageio.spec._internal.common_nodes import Node +from bioimageio.spec._internal.constants import IN_PACKAGE_MESSAGE +from bioimageio.spec._internal.utils import unindent + +Loc = Tuple[str, ...] 
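A quick note on the `Loc` alias just defined: it is the dotted address of a (possibly nested) field in a resource description, and items of list-like fields get an `.i` suffix (see `get_subnodes` further down). A minimal sketch, using a hypothetical field path that is not taken from the actual spec classes:

from typing import Tuple

Loc = Tuple[str, ...]  # dotted path to a (nested) field

# hypothetical path; ".i" addresses "each item of a list-like field"
loc: Loc = ("outputs", "axes.i", "size")
print(".".join(loc))  # -> "outputs.axes.i.size", the field name as rendered in the docs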
+ +ANNOTATION_MAP = { + "pydantic_core._pydantic_core.Url": "Url", + "typing.": "", + "pathlib.": "", + "bioimageio.spec._internal.common_nodes.FrozenDictNode": "Dict", + "bioimageio.spec._internal.common_nodes.Kwargs": "Dict", + "bioimageio.spec.types.": "", + "pydantic.networks.EmailStr": "Email", + "bioimageio.spec.": "", + "NoneType": "None", + "Ellipsis": "...", + "PathType(path_type='dir')": "Directory", +} +MAX_LINE_WIDTH = 120 + +ADDITIONAL_DESCRIPTION_ANY_RESOURCE = ( + "\n**General notes on this documentation:**\n" + "| symbol | explanation |\n" + "| --- | --- |\n" + "| `field`<sub>type hint</sub> | A field's expected type may be" + " shortened. " + "If so, the abbreviated or full type is displayed below the field's description and" + " can be expanded to view " + "further (nested) details if available. |\n" + "| Union[A, B, ...] | indicates that a field value may be of type A or B, etc.|\n" + "| Literal[a, b, ...] | indicates that a field value must be the specific value a" + " or b, etc.|\n" + "| Type* := Type (restrictions) | A field Type* followed by an asterisk indicates" + " that annotations, e.g. " + "value restrictions apply. These are listed in parentheses in the expanded type" + " description. " + "They are not always intuitively understandable and merely a hint at more complex" + " validation.|\n" + r"| \<type\>.v\<major\>_\<minor\>.\<field\> | " + "Subparts of a spec might be taken from another spec type or format version. |\n" + "| `field` ≝ `default` | Default field values are indicated after '≝' and make a" + " field optional. " + "However, `type` and `format_version` always need to be set for resource" + " descriptions written as YAML files " + "and determine which bioimage.io specification applies. They are optional only when" + " creating a resource " + "description in Python code using the appropriate, `type` and `format_version`" + " specific class.|\n" + "| `field` ≝ 🡇 | Default field value is not displayed in-line, but in the code" + " block below. |\n" + f"| {IN_PACKAGE_MESSAGE} | Files referenced in fields which are marked with" + f" '{IN_PACKAGE_MESSAGE}' " + "are included when packaging the resource to a .zip archive. " + "The resource description YAML file (RDF) is always included as" + " 'rdf.yaml'. 
|\n" +) + + +def anchor_tag(heading: str): + a = heading.strip().strip("#") + for rm in ",;!?./<>=`'\"": + a = a.replace(rm, "") + + return "#" + a.replace(" ", "-") + + +def get_subnodes(loc: Loc, annotation: Any) -> Iterator[Tuple[Loc, Type[Node]]]: + try: + is_node = issubclass(annotation, Node) + except TypeError: + is_node = False + + if is_node: + yield loc, annotation + else: + for like_list in ["List", "Tuple", "Set"]: + if str(annotation).startswith(f"typing.{like_list}["): + loc = loc[:-1] + (loc[-1] + ".i",) + break + + for sa in get_args(annotation): + yield from get_subnodes(loc, sa) + + +@dataclass +class AnnotationName: + annotation: Any + indent_level: int + footnotes: OrderedDict[str, str] + full_maybe_multiline: str = field(init=False) + full_inline: str = field(init=False) + abbreviated: Optional[str] = field(init=False) + kind: str = field(init=False) + + annotation_map: Dict[str, str] + + def __post_init__(self): + self.full_maybe_multiline = self.get_name( + self.annotation, abbreviate=False, inline=False + ) + self.full_inline = self.get_name(self.annotation, abbreviate=False) + self.kind = self._get_kind() + if self.indent_level + len(self.full_inline) > MAX_LINE_WIDTH: + self.abbreviated = self.get_name(self.annotation, abbreviate=True) + else: + self.abbreviated = None + + def _get_kind(self): + s = self.full_inline + brackets = 0 + max_balance = -1 + for i in range(min(len(s), 32)): + if s[i] == "[": + brackets += 1 + + if s[i] == "]": + brackets -= 1 + + if brackets == 0: + max_balance = i + + return s[: max_balance + 1] + + def slim(self, s: str): + """shortening that's always OK""" + s = s.strip("'\"") + if s.startswith(""): + s = s[len(" str: + if isinstance(t, FieldInfo): + parts = list(t.metadata) + if t.discriminator: + parts.append(f"discriminator={t.discriminator}") + + return "; ".join(parts) + + s = self.slim(str(t)) + if s.startswith("Annotated["): + args = get_args(t) + if abbreviate: + return f"{self.get_name(args[0], abbreviate, inline, multiline_level)}*" + + annotated_type = self.get_name(args[0], abbreviate, inline, multiline_level) + annos = f"({'; '.join([self.get_name(tt, abbreviate, inline, multiline_level) for tt in args[1:]])})" + if ( + inline + or abbreviate + or ( + multiline_level + len(annotated_type) + 1 + len(annos) + < MAX_LINE_WIDTH + ) + ): + anno_sep = " " + else: + anno_sep = "\n" + " " * multiline_level * 2 + + return f"{annotated_type}{anno_sep}{annos}" + + if s.startswith("Optional["): + return f"Optional[{self.get_name(get_args(t)[0], abbreviate, inline, multiline_level)}]" + + for format_like_seq in ["Union", "Tuple", "Literal", "Dict", "List", "Set"]: + if not s.startswith(format_like_seq): + continue + + args = get_args(t) + if format_like_seq == "Tuple" and len(args) == 2 and args[1] == ...: + args = args[:1] + + format_like_seq_name = self.more_common_sequence_name(format_like_seq) + + if len(args) > 4 and abbreviate: + args = [args[0], "...", args[-1]] + + parts = [ + self.get_name(tt, abbreviate, inline, multiline_level) for tt in args + ] + one_line = f"{format_like_seq_name}[{', '.join(parts)}]" + if ( + abbreviate + or inline + or (self.indent_level + len(one_line) < MAX_LINE_WIDTH) + ): + return one_line + + first_line_descr = f"{format_like_seq_name} of" + if len(args) == 1: + more_maybe_multiline = self.get_name( + args[0], + abbreviate=abbreviate, + inline=inline, + multiline_level=multiline_level, + ) + return first_line_descr + " " + more_maybe_multiline + + parts = [ + self.get_name( + tt, abbreviate, 
inline=inline, multiline_level=multiline_level + 1 + ) + for tt in args + ] + multiline_parts = f"\n{' '* multiline_level * 2}- ".join(parts) + return ( + f"{first_line_descr}\n{' '* multiline_level * 2}- {multiline_parts}\n" + ) + + return s + + +class Field: + STYLE_SWITCH_DEPTH = 4 + + def __init__( + self, + loc: Loc, + info: FieldInfo, + *, + footnotes: OrderedDict[str, str], + rd_class: type[ResourceDescr], + all_examples: List[Tuple[str, List[Any]]], + ) -> None: + super().__init__() + assert loc + self.loc = loc + self.info = info + self.footnotes = footnotes + self.annotation_map = {f"{rd_class.__module__}.": "", **ANNOTATION_MAP} + self.rd_class = rd_class + self.all_examples = all_examples + + @property + def indent_with_symbol(self): + spaces = " " * max(0, self.indent_level - 2) + if len(self.loc) <= self.STYLE_SWITCH_DEPTH: + symbol = f"#{'#'* len(self.loc)} " + else: + symbol = "* " + + return f"{spaces}{symbol}" + + @property + def indent_level(self): + return max(0, len(self.loc) - self.STYLE_SWITCH_DEPTH) * 2 + + @property + def indent_spaces(self): + return " " * self.indent_level + + @property + def name(self): + n = ".".join(self.loc) + if len(self.loc) <= self.STYLE_SWITCH_DEPTH: + return f"`{n}`" + else: + return f'<a id="{n}"></a>`{n}`' + + def get_explanation(self): + title = self.info.title or "" + description = unindent(self.info.description or "", ignore_first_line=True) + ret = self.indent_spaces + if title: + ret += f"{title}: " + if "\n" in description or len(ret) + len(description) > MAX_LINE_WIDTH: + ret += "\n" + + ret += description.strip() + "\n" + + if self.info.examples: + ex = "Example" if len(self.info.examples) == 1 else "Examples" + ex = f"*{ex}:*" + if len(self.info.examples) == 1: + e = self.info.examples[0] + example_inline = f"'{e}'" if isinstance(e, str) else str(e) + else: + example_inline = str(self.info.examples) + if self.indent_level + len(example_inline) > MAX_LINE_WIDTH: + for i in range(len(self.info.examples) - 1, 0, -1): + example_inline = str(self.info.examples[:i] + ["…"]) + if self.indent_level + len(example_inline) <= MAX_LINE_WIDTH: + break + + ret += f"[{ex}]({anchor_tag(self.name)}) {example_inline}\n" + self.all_examples.append((self.name, self.info.examples)) + + return ret.replace("\n", self.indent_spaces + "\n") + + def get_default_value(self): + d = self.info.get_default(call_default_factory=True) + if d is PydanticUndefined: + return "" + # elif d == "": + # d = "" + d_inline = f"`{d}`" + if self.indent_level + 30 + len(d_inline) > MAX_LINE_WIDTH: + return f" ≝ 🡇\n```python\n{pformat(d, indent=self.indent_level, width=MAX_LINE_WIDTH)}\n```\n" + else: + return f" ≝ {d_inline}" + + def get_md(self) -> str: + nested = "" + for subloc, subnode in get_subnodes(self.loc, self.info.annotation): + sub_anno = AnnotationName( + annotation=subnode, + footnotes=self.footnotes, + indent_level=self.indent_level + 2, + annotation_map=self.annotation_map, + ).full_inline + subfields = "" + for sfn, sinfo in subnode.model_fields.items(): + subfields += ( + "\n" + + Field( + subloc + (sfn,), + sinfo, + footnotes=self.footnotes, + rd_class=self.rd_class, + all_examples=self.all_examples, + ).get_md() + ) + if subfields: + nested += f"\n{self.indent_spaces}**{sub_anno}:**{subfields}" + + an = AnnotationName( + annotation=self.info.annotation, + footnotes=self.footnotes, + indent_level=self.indent_level, + annotation_map=self.annotation_map, + ) + first_line = ( + f"{self.indent_with_symbol}{self.name}" + f" {an.kind}{self.get_default_value()}\n" +
) + if (nested or an.abbreviated) and len(self.loc) <= self.STYLE_SWITCH_DEPTH: + if an.abbreviated is None: + expanded_type_anno = "" + else: + expanded_type_anno = an.full_maybe_multiline + "\n" + + ret = ( + f"{first_line}{self.get_explanation()}\n" + f"<details><summary>{an.abbreviated or an.full_inline}\n\n</summary>\n\n" + f"{expanded_type_anno}{nested}\n</details>\n" + ) + else: + if an.kind == an.full_inline: + expanded_type_anno = "" + else: + expanded_type_anno = "\n" + an.full_inline + + ret = f"{first_line}{self.get_explanation()}\n{expanded_type_anno}{nested}\n" + + return ret + + +def get_documentation_file_name( + rd_class: Type[ResourceDescr], *, latest: bool = False, minor: bool = False +): + assert not (latest and minor) + typ = to_snake(rd_class.__name__) + if latest: + v = "latest" + elif minor: + v = "v" + "-".join(rd_class.implemented_format_version.split(".")[:2]) + else: + v = f"v{rd_class.implemented_format_version.replace('.', '-')}" + + return f"{typ}_{v}.md" + + +def export_documentation(folder: Path, rd_class: Type[ResourceDescr]) -> Path: + footnotes: OrderedDict[str, str] = OrderedDict() + all_examples: List[Tuple[str, List[Any]]] = [] + md = ( + "# " + + (rd_class.model_config.get("title") or "") + + "\n" + + ( + unindent(rd_class.__doc__ or "", ignore_first_line=True) + + ADDITIONAL_DESCRIPTION_ANY_RESOURCE + ) + ) + all_fields = [ + (fn, info) + for fn, info in rd_class.model_fields.items() + if fn not in ("type", "format_version") + ] + + def field_sort_key(fn_info: Tuple[str, FieldInfo]) -> Tuple[bool, str]: + fn, info = fn_info + return ( + info.get_default(call_default_factory=True) is not PydanticUndefined, + fn, + ) + + all_fields = sorted(all_fields, key=field_sort_key) + + field_names = ["type", "format_version"] + [fn for (fn, _) in all_fields] + for field_name in field_names: + info = rd_class.model_fields[field_name] + md += ( + "\n" + + Field( + (field_name,), + info, + footnotes=footnotes, + rd_class=rd_class, + all_examples=all_examples, + ).get_md() + ) + + md += "\n" + for i, full in enumerate(footnotes, start=1): + md += f"\n[^{i}]: {full}" + + if all_examples: + md += "# Example values\n" + for name, examples in all_examples: + if len(examples) == 1: + formatted_examples = str(examples[0]) + else: + formatted_examples = "".join(f"- {ex}\n" for ex in examples) + + md += f"### {name}\n{formatted_examples}\n" + + if footnotes: + md += "\n" + + for file_path in [ + folder / get_documentation_file_name(rd_class, minor=True), + folder / get_documentation_file_name(rd_class), + ]: + _ = file_path.write_text(md, encoding="utf-8") + print(f"written {file_path}") + + return file_path # type: ignore + + +def export_module_documentations(folder: Path, module: ModuleType): + rd_name = to_pascal(module.__name__.split(".")[-1]) + "Descr" + + rd_class = None + latest = None + v = None + for v in sorted(dir(module)): + v_module = getattr(module, v) + if not hasattr(v_module, rd_name): + continue + + rd_class = getattr(v_module, rd_name) + latest = export_documentation(folder, rd_class) + + assert latest is not None + assert rd_class is not None + shutil.copy( + str(latest), folder / get_documentation_file_name(rd_class, latest=True) + ) + print(f" copied {latest} as latest") + + +def main(dist: Path): + dist.mkdir(exist_ok=True, parents=True) + + export_module_documentations(dist, application) + export_module_documentations(dist, collection) + export_module_documentations(dist, dataset) + export_module_documentations(dist, generic) + export_module_documentations(dist, model) + export_module_documentations(dist, notebook) + + +def parse_args(): + p = ArgumentParser( + description="script that generates bioimageio user documentation" + ) + _ = p.add_argument( + "--dist", + nargs="?", + default=str((Path(__file__).parent / "../user_docs").resolve()), + ) + args = p.parse_args() + return args + + +if __name__ == "__main__": +
args = parse_args() + main(dist=Path(args.dist)) diff --git a/scripts/generate_version_submodule_imports.py b/scripts/generate_version_submodule_imports.py new file mode 100644 index 000000000..16afa70c3 --- /dev/null +++ b/scripts/generate_version_submodule_imports.py @@ -0,0 +1,188 @@ +import re +import sys +from argparse import ArgumentParser +from dataclasses import dataclass, field +from difflib import ndiff +from pathlib import Path +from typing import List, Literal + +import black.files +import black.mode + +ROOT_PATH = Path(__file__).parent.parent + +AUTOGEN_START = "# autogen: start\n" +AUTOGEN_BODY_SINGLE = """from . import {info.all_version_modules_import_as} +from .{info.latest_version_module} import {info.target_node} as {info.target_node} + +Any{info.target_node} = {info.target_node} +""" +AUTOGEN_BODY_MULTIPLE = """\"\"\" +implementations of all released minor versions are available in submodules: +{info.submodule_list} +\"\"\" +from typing import Union + +from pydantic import Discriminator +from typing_extensions import Annotated + +{info.all_version_modules_imports} + +Any{info.target_node} = Annotated[Union[{info.all_target_nodes_plain_aliases}], Discriminator("format_version")] +\"\"\"Union of any released {info.target} description\"\"\" +""" + +AUTOGEN_STOP = "# autogen: stop\n" + +VERSION_MODULE_PATTERN = r"v(?P<major>\d+)_(?P<minor>\d+).py" + + +def main(command: Literal["check", "generate"]): + for target in [ + "generic", + "model", + "dataset", + "collection", + "notebook", + "application", + ]: + process( + Info( + target=target, + all_version_modules=get_ordered_version_submodules(target), + ), + check=command == "check", + ) + + return 0 + + +def parse_args(): + p = ArgumentParser( + description=( + "script that generates imports in bioimageio.spec resource description" + " submodules" + ) + ) + _ = p.add_argument( + "command", choices=["check", "generate"], nargs="?", default="generate" + ) + args = p.parse_args() + return args + + +@dataclass +class Info: + target: str + all_version_modules: List[str] + target_node: str = field(init=False) + all_target_nodes_plain: str = field(init=False) + all_target_nodes_plain_aliases: str = field(init=False) + latest_version_module: str = field(init=False) + all_version_modules_import_as: str = field(init=False) + all_version_modules_imports: str = field(init=False) + package_path: Path = field(init=False) + submodule_list: str = field(init=False) + + def __post_init__(self): + self.target_node = self.target.capitalize() + "Descr" + self.all_target_nodes_plain = ", ".join( + [f"{vm}.{self.target_node}" for vm in self.all_version_modules] + ) + self.all_target_nodes_plain_aliases = ", ".join( + [f"{self.target_node}_{vm}" for vm in self.all_version_modules] + ) + self.latest_version_module = self.all_version_modules[-1] + self.all_version_modules_import_as = ", ".join( + f"{m} as {m}" for m in self.all_version_modules + ) + + avmi = [ + f"from .{m} import {self.target_node} as {self.target_node}_{m}" + for m in self.all_version_modules + ] + avmi.insert( + -1, + f"from .{self.latest_version_module} import {self.target_node} as " + + f"{self.target_node}", + ) + self.all_version_modules_imports = "\n".join(avmi) + + self.package_path = (ROOT_PATH / "bioimageio" / "spec" / self.target).resolve() + self.submodule_list = "\n".join( + [ + f"- {self.target} {vm}: `bioimageio.spec.{self.target}.{vm}."
+ + f"{self.target_node}` [user documentation](../../../user_docs/" + + f"{self.target}_descr_{vm.replace('_', '-')}.md)" + for vm in self.all_version_modules + ] + ) + + +def process(info: Info, check: bool): + package_init = info.package_path / "__init__.py" + print(f"{'Checking' if check else 'Updating'} {package_init}") + + init_content = package_init.read_text() + pattern = AUTOGEN_START + ".*" + AUTOGEN_STOP + flags = re.DOTALL + if not re.findall(pattern, init_content, flags=flags): + raise RuntimeError( + f"Could not find autogen markers in {package_init}. Expected to" + + f" find:\n{AUTOGEN_START}...{AUTOGEN_STOP}" + ) + + autogen_body = ( + AUTOGEN_BODY_SINGLE + if len(info.all_version_modules) == 1 + else AUTOGEN_BODY_MULTIPLE + ) + updated = re.sub( + pattern, + AUTOGEN_START + autogen_body.format(info=info) + AUTOGEN_STOP, + init_content, + flags=flags, + ) + black_config = black.files.parse_pyproject_toml(str(ROOT_PATH / "pyproject.toml")) + black_config["target_versions"] = set( + ( + getattr(black.mode.TargetVersion, tv.upper()) + for tv in black_config.pop("target_version") + ) + ) + updated = black.format_str(updated, mode=black.mode.Mode(**black_config)) + if check: + if init_content == updated: + print("all seems fine") + else: + raise RuntimeError( + "call with mode 'generate' to update:\n" + + "".join( + ndiff( + init_content.splitlines(keepends=True), + updated.splitlines(keepends=True), + ) + ) + ) + else: + _ = package_init.write_text(updated) + + +def get_ordered_version_submodules(target: str): + matches = [ + m + for p in (ROOT_PATH / "bioimageio" / "spec" / target).iterdir() + if p.is_file() and (m := re.fullmatch(VERSION_MODULE_PATTERN, p.name)) + ] + if not matches: + raise RuntimeError(f"No version modules found for target '{target}'") + + return [ + m.string[:-3] + for m in sorted(matches, key=lambda m: (int(m["major"]), int(m["minor"]))) + ] + + +if __name__ == "__main__": + args = parse_args() + sys.exit(main(args.command)) diff --git a/scripts/generate_weight_formats_docs.py b/scripts/generate_weight_formats_docs.py deleted file mode 100644 index 0f4e6b2d3..000000000 --- a/scripts/generate_weight_formats_docs.py +++ /dev/null @@ -1,120 +0,0 @@ -import dataclasses -from pathlib import Path -from typing import List, Sequence, Tuple, Type - -import bioimageio.spec.model -from bioimageio.spec.model.v0_3.schema import _WeightsEntryBase -from bioimageio.spec.shared.utils import resolve_bioimageio_descrcription - -try: - from typing import get_args -except ImportError: - from typing_extensions import get_args # type: ignore - - -@dataclasses.dataclass -class Kwarg: - name: str - description: str - optional: bool - maybe_optional: bool - - -@dataclasses.dataclass -class WeightsFormatDocNode: - name: str - description: str - kwargs: List[Kwarg] - - -def get_doc(schema) -> Tuple[List[Kwarg], List[WeightsFormatDocNode]]: - """retrieve documentation of weight formats from their definitions schema""" - - def get_kwargs_doc(we: Type[_WeightsEntryBase], exclude: Sequence[str] = tuple()) -> List[Kwarg]: - return sorted( - [ - Kwarg( - name=name, - description=resolve_bioimageio_descrcription(f.bioimageio_description), - optional=not f.required or bool(f.missing), - maybe_optional=f.bioimageio_maybe_required, - ) - for name, f in we().fields.items() - if name != "weights_format" and name not in exclude - ], - key=lambda kw: (kw.optional, kw.name), - ) - - common_kwargs = get_kwargs_doc(_WeightsEntryBase) - - def get_wf_name_from_wf_schema(wfs): - return 
wfs().fields["weights_format"].validate.comparable - - return ( - common_kwargs, - [ - WeightsFormatDocNode( - name=get_wf_name_from_wf_schema(wfs), - description=resolve_bioimageio_descrcription(wfs.bioimageio_description), - kwargs=get_kwargs_doc(wfs, exclude=[kw.name for kw in common_kwargs]), - ) - for wfs in get_args(schema.WeightsEntry) # schema.WeightsEntry is a typing.Union of weights format schemas - ], - ) - - -def get_md_kwargs(kwargs: Sequence[Kwarg], indent: int = 0): - md = "" - for kwarg in kwargs: - md += ( - f"{' ' * indent}- `{kwarg.name}` _{'optional' if kwarg.optional or kwarg.maybe_optional else 'required'}" - f"{'*' if kwarg.maybe_optional else ''}_ {kwarg.description}\n" - ) - - return md - - -def md_from_doc(doc_nodes: List[WeightsFormatDocNode]): - md = "" - for doc in doc_nodes: - md += f"### `{doc.name}`\n{doc.description}\n" - if doc.kwargs: - md += f"- key word arguments:\n" - md += get_md_kwargs(doc.kwargs, indent=2) - - return md - - -def export_markdown_docs(folder: Path, spec) -> None: - model_or_version = spec.__name__.split(".")[-1] - format_version_wo_patch = ".".join(spec.format_version.split(".")[:2]) - if model_or_version == "model": - format_version_file_name = "latest" - else: - format_version_file_name = format_version_wo_patch.replace(".", "_") - - common_kwargs, doc = get_doc(spec.schema) - md = ( - ( - f"# Weight formats in model spec {format_version_wo_patch}\n" - "## Common key word arguments for all weight formats\n" - "Optional arguments are marked as _optional_ or _optional*_ with an asterisk if they are optional " - "depending on another argument's value.\n\n" - ) - + get_md_kwargs(common_kwargs) - + ("\n## Weight formats and their additional key word arguments\n") - ) - md += md_from_doc(doc) - path = folder / f"weight_formats_spec_{format_version_file_name}.md" - path.write_text(md, encoding="utf-8") - - -if __name__ == "__main__": - import bioimageio.spec.model - - dist = Path(__file__).parent / "../dist" - dist.mkdir(exist_ok=True) - - export_markdown_docs(dist, bioimageio.spec.model.v0_3) - export_markdown_docs(dist, bioimageio.spec.model.v0_4) - export_markdown_docs(dist, bioimageio.spec.model) diff --git a/scripts/generate_weight_formats_overview.py b/scripts/generate_weight_formats_overview.py deleted file mode 100644 index 0594a1d2b..000000000 --- a/scripts/generate_weight_formats_overview.py +++ /dev/null @@ -1,79 +0,0 @@ -import json -import sys -from argparse import ArgumentParser -from pathlib import Path -from urllib.request import urlretrieve - -from bioimageio.spec.model import raw_nodes, schema -from bioimageio.spec.shared import get_args -from bioimageio.spec.shared.utils import resolve_bioimageio_descrcription - -MANIFEST_URL = "https://raw.githubusercontent.com/bioimage-io/bioimage-io-models/gh-pages/manifest.bioimage.io.json" -WEIGHTS_FORMATS_OVERVIEW_PATH = ( - Path(__file__).parent / "../dist" / "weight_formats_spec.json" -) # todo: weight_formats -> weights_formats - -# defaults for transition period -consumer_defaults = { - "ilastik": ["torchscript", "pytorch_state_dict", "onnx"], - "zero": ["keras_hdf5"], - "deepimagej": ["torchscript", "tensorflow_saved_model_bundle"], - "imjoy": ["onnx"], -} - - -def parse_args(): - p = ArgumentParser(description=("script that generates weights formats overview")) - p.add_argument("command", choices=["check", "generate"]) - - args = p.parse_args() - return args - - -def main(args): - local = Path(urlretrieve(MANIFEST_URL)[0]) - with local.open() as f: - collection = json.load(f) 
- - collection = collection["collections"] - - consumers = {c["id"]: c for c in collection} - for consumer in consumers.values(): - if consumer["id"] in consumer_defaults: - consumer["config"] = consumer.get("config", {}) - consumer["config"]["supported_weight_formats"] = consumer_defaults[consumer["id"]] - - weights_format_ids = get_args(raw_nodes.WeightsFormat) - weights_format_class_names = [wf.title().replace("_", "") + "WeightsEntry" for wf in weights_format_ids] - - weights_formats = { - wf: { - "name": getattr(raw_nodes, wfcn).weights_format_name, - "description": resolve_bioimageio_descrcription(getattr(schema, wfcn).bioimageio_description), - "consumers": [ - cname - for cname, c in consumers.items() - if wf in c.get("supported_weights_formats", consumer_defaults.get(cname, [])) - ], - } - for wf, wfcn in zip(weights_format_ids, weights_format_class_names) - } - - overview = {"consumers": consumers, "weight_formats": weights_formats} # todo: weight_formats -> weights_formats - - if args.command == "generate": - with WEIGHTS_FORMATS_OVERVIEW_PATH.open("w") as f: - json.dump(overview, f, indent=4, sort_keys=True) - elif args.command == "check": - with WEIGHTS_FORMATS_OVERVIEW_PATH.open() as f: - found = json.load(f) - - if found != overview: - return 1 - else: - raise NotImplementedError(args.command) - - -if __name__ == "__main__": - args = parse_args() - sys.exit(main(args)) diff --git a/scripts/report_invalid_rdfs.py b/scripts/report_invalid_rdfs.py new file mode 100644 index 000000000..67cbe9957 --- /dev/null +++ b/scripts/report_invalid_rdfs.py @@ -0,0 +1,243 @@ +from argparse import ArgumentParser +from pathlib import Path +from typing import Literal + +from typing_extensions import assert_never + +from bioimageio.spec import load_description_and_validate_format_only + +# from tests.test_bioimageio_collection import ( +# KNOWN_INVALID, +# KNOWN_INVALID_AS_LATEST, +# RDF_BASE_URL, +# ) +RDF_BASE_URL = "https://bioimage-io.github.io/collection-bioimage-io/rdfs/" + + +KNOWN_INVALID = { + "10.5281/zenodo.5749843/5888237/rdf.yaml", + "10.5281/zenodo.5910163/5942853/rdf.yaml", + "10.5281/zenodo.5910854/6539073/rdf.yaml", + "10.5281/zenodo.5914248/6514622/rdf.yaml", + "10.5281/zenodo.6559929/6559930/rdf.yaml", + "10.5281/zenodo.7614645/7642674/rdf.yaml", + "biapy/biapy/latest/rdf.yaml", + "biapy/notebook_classification_2d/latest/rdf.yaml", + "biapy/Notebook_semantic_segmentation_3d/latest/rdf.yaml", + "deepimagej/deepimagej/latest/rdf.yaml", + "deepimagej/DeepSTORMZeroCostDL4Mic/latest/rdf.yaml", + "deepimagej/Mt3VirtualStaining/latest/rdf.yaml", + "deepimagej/MU-Lux_CTC_PhC-C2DL-PSC/latest/rdf.yaml", + "deepimagej/SkinLesionClassification/latest/rdf.yaml", + "deepimagej/SMLMDensityMapEstimationDEFCoN/latest/rdf.yaml", + "deepimagej/UNet2DGlioblastomaSegmentation/latest/rdf.yaml", + "deepimagej/WidefieldDapiSuperResolution/latest/rdf.yaml", + "deepimagej/WidefieldFitcSuperResolution/latest/rdf.yaml", + "deepimagej/WidefieldTxredSuperResolution/latest/rdf.yaml", + "fiji/N2VSEMDemo/latest/rdf.yaml", + "ilastik/mitoem_segmentation_challenge/latest/rdf.yaml", + "imjoy/LuCa-7color/latest/rdf.yaml", + "zero/Dataset_CARE_2D_coli_DeepBacs/latest/rdf.yaml", + "zero/Dataset_fnet_DeepBacs/latest/rdf.yaml", + "zero/Dataset_Noise2Void_2D_subtilis_DeepBacs/latest/rdf.yaml", + "zero/Dataset_SplineDist_2D_DeepBacs/latest/rdf.yaml", + "zero/Dataset_StarDist_2D_DeepBacs/latest/rdf.yaml", + "zero/Dataset_U-Net_2D_DeepBacs/latest/rdf.yaml", + "zero/Dataset_U-Net_2D_multilabel_DeepBacs/latest/rdf.yaml", 
+ "zero/Dataset_YOLOv2_antibiotic_DeepBacs/latest/rdf.yaml", + "zero/Dataset_YOLOv2_coli_DeepBacs/latest/rdf.yaml", + "zero/Notebook_CycleGAN_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_DecoNoising_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_Detectron2_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_DRMIME_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_EmbedSeg_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_MaskRCNN_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_pix2pix_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_RetinaNet_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_StarDist_3D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_U-Net_2D_multilabel_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_U-Net_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_U-Net_3D_ZeroCostDL4Mic/latest/rdf.yaml", +} +KNOWN_INVALID_AS_LATEST = { + "10.5281/zenodo.5749843/5888237/rdf.yaml", + "10.5281/zenodo.5874841/6630266/rdf.yaml", + "10.5281/zenodo.5910163/5942853/rdf.yaml", + "10.5281/zenodo.5914248/6514622/rdf.yaml", + "10.5281/zenodo.5914248/8186255/rdf.yaml", + "10.5281/zenodo.6383429/7774505/rdf.yaml", + "10.5281/zenodo.6406803/6406804/rdf.yaml", + "10.5281/zenodo.6559474/6559475/rdf.yaml", + "10.5281/zenodo.6559929/6559930/rdf.yaml", + "10.5281/zenodo.6811491/6811492/rdf.yaml", + "10.5281/zenodo.6865412/6919253/rdf.yaml", + "10.5281/zenodo.7380171/7405349/rdf.yaml", + "10.5281/zenodo.7614645/7642674/rdf.yaml", + "10.5281/zenodo.8401064/8429203/rdf.yaml", + "10.5281/zenodo.8421755/8432366/rdf.yaml", + "biapy/biapy/latest/rdf.yaml", + "biapy/notebook_classification_2d/latest/rdf.yaml", + "biapy/notebook_classification_3d/latest/rdf.yaml", + "biapy/notebook_denoising_2d/latest/rdf.yaml", + "biapy/notebook_denoising_3d/latest/rdf.yaml", + "biapy/notebook_detection_2d/latest/rdf.yaml", + "biapy/notebook_detection_3d/latest/rdf.yaml", + "biapy/notebook_instance_segmentation_2d/latest/rdf.yaml", + "biapy/notebook_instance_segmentation_3d/latest/rdf.yaml", + "biapy/notebook_self_supervision_2d/latest/rdf.yaml", + "biapy/notebook_self_supervision_3d/latest/rdf.yaml", + "biapy/notebook_semantic_segmentation_2d/latest/rdf.yaml", + "biapy/Notebook_semantic_segmentation_3d/latest/rdf.yaml", + "biapy/notebook_super_resolution_2d/latest/rdf.yaml", + "biapy/notebook_super_resolution_3d/latest/rdf.yaml", + "bioimageio/stardist/latest/rdf.yaml", + "deepimagej/deepimagej-web/latest/rdf.yaml", + "deepimagej/deepimagej/latest/rdf.yaml", + "deepimagej/DeepSTORMZeroCostDL4Mic/latest/rdf.yaml", + "deepimagej/DeepSTORMZeroCostDL4Mic/latest/rdf.yaml", + "deepimagej/DeepSTORMZeroCostDL4Mic/latest/rdf.yaml", + "deepimagej/DeepSTORMZeroCostDL4Mic/latest/rdf.yaml", + "deepimagej/EVsTEMsegmentationFRUNet/latest/rdf.yaml", + "deepimagej/MoNuSeg_digital_pathology_miccai2018/latest/rdf.yaml", + "deepimagej/Mt3VirtualStaining/latest/rdf.yaml", + "deepimagej/MU-Lux_CTC_PhC-C2DL-PSC/latest/rdf.yaml", + "deepimagej/SkinLesionClassification/latest/rdf.yaml", + "deepimagej/smlm-deepimagej/latest/rdf.yaml", + "deepimagej/SMLMDensityMapEstimationDEFCoN/latest/rdf.yaml", + "deepimagej/unet-pancreaticcellsegmentation/latest/rdf.yaml", + "deepimagej/UNet2DGlioblastomaSegmentation/latest/rdf.yaml", + "deepimagej/WidefieldDapiSuperResolution/latest/rdf.yaml", + "deepimagej/WidefieldFitcSuperResolution/latest/rdf.yaml", + "deepimagej/WidefieldTxredSuperResolution/latest/rdf.yaml", + "dl4miceverywhere/DL4MicEverywhere/latest/rdf.yaml", + "dl4miceverywhere/Notebook_bioimageio_pytorch/latest/rdf.yaml", + 
"dl4miceverywhere/Notebook_bioimageio_tensorflow/latest/rdf.yaml", + "fiji/Fiji/latest/rdf.yaml", + "hpa/HPA-Classification/latest/rdf.yaml", + "hpa/hpa-kaggle-2021-dataset/latest/rdf.yaml", + "icy/icy/latest/rdf.yaml", + "ilastik/arabidopsis_tissue_atlas/latest/rdf.yaml", + "ilastik/cremi_training_data/latest/rdf.yaml", + "ilastik/ilastik/latest/rdf.yaml", + "ilastik/isbi2012_neuron_segmentation_challenge/latest/rdf.yaml", + "ilastik/mitoem_segmentation_challenge/latest/rdf.yaml", + "ilastik/mws-segmentation/latest/rdf.yaml", + "imjoy/BioImageIO-Packager/latest/rdf.yaml", + "imjoy/GenericBioEngineApp/latest/rdf.yaml", + "imjoy/HPA-Single-Cell/latest/rdf.yaml", + "imjoy/ImageJ.JS/latest/rdf.yaml", + "imjoy/ImJoy/latest/rdf.yaml", + "imjoy/LuCa-7color/latest/rdf.yaml", + "imjoy/vizarr/latest/rdf.yaml", + "qupath/QuPath/latest/rdf.yaml", + "stardist/stardist/latest/rdf.yaml", + "zero/Dataset_CARE_2D_coli_DeepBacs/latest/rdf.yaml", + "zero/Dataset_CARE_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Dataset_CARE_3D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Dataset_CycleGAN_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Dataset_Deep-STORM_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Dataset_fnet_3D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Dataset_fnet_DeepBacs/latest/rdf.yaml", + "zero/Dataset_Noise2Void_2D_subtilis_DeepBacs/latest/rdf.yaml", + "zero/Dataset_Noise2Void_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Dataset_Noise2Void_3D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Dataset_Noisy_Nuclei_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Dataset_pix2pix_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Dataset_SplineDist_2D_DeepBacs/latest/rdf.yaml", + "zero/Dataset_StarDist_2D_DeepBacs/latest/rdf.yaml", + "zero/Dataset_StarDist_2D_ZeroCostDL4Mic_2D/latest/rdf.yaml", + "zero/Dataset_StarDist_brightfield_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Dataset_StarDist_brightfield2_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Dataset_StarDist_Fluo_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Dataset_StarDist_fluo2_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Dataset_U-Net_2D_DeepBacs/latest/rdf.yaml", + "zero/Dataset_U-Net_2D_multilabel_DeepBacs/latest/rdf.yaml", + "zero/Dataset_YOLOv2_antibiotic_DeepBacs/latest/rdf.yaml", + "zero/Dataset_YOLOv2_coli_DeepBacs/latest/rdf.yaml", + "zero/Dataset_YOLOv2_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook Preview/latest/rdf.yaml", + "zero/Notebook_Augmentor_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_CARE_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_CARE_3D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_Cellpose_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_CycleGAN_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_CycleGAN_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_DecoNoising_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_DecoNoising_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_Deep-STORM_2D_ZeroCostDL4Mic_DeepImageJ/latest/rdf.yaml", + "zero/Notebook_Deep-STORM_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_DenoiSeg_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_Detectron2_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_Detectron2_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_DFCAN_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_DRMIME_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_DRMIME_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_EmbedSeg_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_EmbedSeg_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_fnet_2D_ZeroCostDL4Mic/latest/rdf.yaml", + 
"zero/Notebook_fnet_3D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_Interactive_Segmentation_Kaibu_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_MaskRCNN_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_MaskRCNN_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_Noise2Void_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_Noise2Void_3D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_pix2pix_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_pix2pix_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/notebook_preview/latest/rdf.yaml-latest", + "zero/notebook_preview/latest/rdf.yaml", + "zero/Notebook_Quality_Control_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_RCAN_3D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_RetinaNet_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_RetinaNet_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_SplineDist_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_StarDist_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_StarDist_3D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_StarDist_3D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_U-Net_2D_multilabel_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_U-Net_2D_multilabel_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_U-Net_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_U-Net_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_U-Net_3D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_U-Net_3D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_YOLOv2_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/WGAN_ZeroCostDL4Mic.ipynb/latest/rdf.yaml", +} + + +def parse_args(): + p = ArgumentParser( + description=( + "report why some RDFs in the bioimage.io collection fail validation" + ) + ) + _ = p.add_argument( + "output", type=str, nargs="?", default="invalid_rdfs_{version}.md" + ) + _ = p.add_argument("limit", type=int, nargs="?", default=200) + _ = p.add_argument( + "--version", default="discover", nargs="?", choices=["discover", "latest"] + ) + args = p.parse_args() + return args + + +def main(output: Path, limit: int, version: Literal["discover", "latest"]): + if version == "discover": + invalid = KNOWN_INVALID + elif version == "latest": + invalid = KNOWN_INVALID_AS_LATEST + else: + assert_never(version) + + invalid = [RDF_BASE_URL + k for k in sorted(invalid)[:limit]] + + summaries = [load_description_and_validate_format_only(rdf) for rdf in invalid] + formatted = [s.format() for s in summaries] + out = "\n\n".join(formatted) + _ = output.write_text(out, encoding="utf-8") + print(out) + + +if __name__ == "__main__": + args = parse_args() + main(Path(args.output.format(version=args.version)), args.limit, args.version) diff --git a/scripts/update_spdx_licenses.py b/scripts/update_spdx_licenses.py new file mode 100644 index 000000000..6206b1ca9 --- /dev/null +++ b/scripts/update_spdx_licenses.py @@ -0,0 +1,72 @@ +"""script that updates the content of bioimageio/spec/static/spdx_licenses.json and generates the `LicenseId` Literal""" + +import json +import sys +import urllib.request +from argparse import ArgumentParser +from pathlib import Path + +import black.files +import black.mode + +PROJECT_ROOT = Path(__file__).parent.parent + +URL = ( + "https://raw.githubusercontent.com/spdx/license-list-data/{tag}/json/licenses.json" +) +LICENSES_JSON_FILE = PROJECT_ROOT / "bioimageio/spec/static/spdx_licenses.json" +LICENSE_ID_MODULE_PATH = ( + PROJECT_ROOT / "bioimageio/spec/_internal/_generated_spdx_license_literals.py" +) +LICENSE_ID_MODULE_TEMPLATE = """# This file was generated by 
scripts/update_spdx_licenses.py +from typing import Literal + +LicenseId = Literal{license_ids} + +DeprecatedLicenseId = Literal{deprecated_license_ids} +""" + + +def parse_args(): + p = ArgumentParser(description="script that updates bioimageio/spec/static/spdx_licenses.json and the generated LicenseId Literal") + _ = p.add_argument("tag", nargs="?", default="v3.21") + + args = p.parse_args() + return dict(tag=args.tag) + + +def main(*, tag: str): + url = URL.format(tag=tag) + print("requesting:", url) + text = urllib.request.urlopen(url).read().decode("utf-8") + _ = LICENSES_JSON_FILE.write_text(text, encoding="utf-8") + print(f"Updated {LICENSES_JSON_FILE}") + + licenses = json.loads(text)["licenses"] + license_ids = [x["licenseId"] for x in licenses if not x["isDeprecatedLicenseId"]] + deprecated_license_ids = [ + x["licenseId"] for x in licenses if x["isDeprecatedLicenseId"] + ] + code = LICENSE_ID_MODULE_TEMPLATE.format( + license_ids=license_ids, deprecated_license_ids=deprecated_license_ids + ) + + # apply black formatting + black_config = black.files.parse_pyproject_toml( + str(PROJECT_ROOT / "pyproject.toml") + ) + black_config["target_versions"] = set( + ( + getattr(black.mode.TargetVersion, tv.upper()) + for tv in black_config.pop("target_version") + ) + ) + code = black.format_str(code, mode=black.mode.Mode(**black_config)) + + _ = LICENSE_ID_MODULE_PATH.write_text(code, encoding="utf-8") + print(f"Updated {LICENSE_ID_MODULE_PATH}") + + +if __name__ == "__main__": + kwargs = parse_args() + sys.exit(main(**kwargs)) diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 58ab0ca76..000000000 --- a/setup.cfg +++ /dev/null @@ -1,13 +0,0 @@ -[tool:isort] -line_length = 120 -multi_line_output = 3 -include_trailing_comma = true - -[flake8] -max-line-length = 120 - -[pylint] -max-line-length = 120 - -[mypy] -ignore_missing_imports = true diff --git a/setup.py b/setup.py index ebfa516fd..8fb54e704 100644 --- a/setup.py +++ b/setup.py @@ -1,5 +1,6 @@ import json from pathlib import Path + from setuptools import find_namespace_packages, setup # Get the long description from the README file @@ -12,41 +13,55 @@ setup( name="bioimageio.spec", version=VERSION, - description="Parser and validator library for BioImage.IO specifications", + description="Parser and validator library for bioimage.io specifications", long_description=long_description, long_description_content_type="text/markdown", url="https://github.com/bioimage-io/spec-bioimage-io", - author="Bioimage Team", - classifiers=[ # Optional + author="bioimage.io Team", + classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", - "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", ], packages=find_namespace_packages(exclude=["tests"]), # Required install_requires=[ - "marshmallow-jsonschema", - "marshmallow-union", - "marshmallow>=3.6.0,<4.0", - "numpy", + "annotated-types>=0.5.0", + "email_validator", + "imageio", + "loguru", + "numpy>=1.21", "packaging>=17.0", - "requests;platform_system!='Emscripten'", - "ruamel.yaml;platform_system!='Emscripten'", - "tqdm;platform_system!='Emscripten'", - "typer", + "pooch", + "pydantic-settings", + "pydantic[email]>=2.6.3", + "python-dateutil", + "ruyaml", + "tqdm", "typing-extensions", ], - entry_points={"console_scripts": ["bioimageio = bioimageio.spec.__main__:app"]}, - extras_require={"test": ["black",
"mypy", "pytest"], "dev": ["pre-commit"]}, - scripts=[ - "scripts/generate_json_specs.py", - "scripts/generate_processing_docs.py", - "scripts/generate_rdf_docs.py", - "scripts/generate_weight_formats_docs.py", - ], + extras_require={ + "dev": [ + "black", + "deepdiff", + "filelock", # for session fixtures due to pytest-xdist + "jsonschema", + "jupyter", + "lxml", + "pdoc", + "pre-commit", + "pyright", + "pytest-xdist[psutil]", # parallel pytest with 'pytest -n auto' + "pytest", + "ruff", # check line length in cases black cannot fix it + ] + }, + scripts=[], include_package_data=True, - project_urls={ # Optional + project_urls={ "Bug Reports": "https://github.com/bioimage-io/spec-bioimage-io/issues", "Source": "https://github.com/bioimage-io/spec-bioimage-io", }, diff --git a/supported_formats_and_operations.md b/supported_formats_and_operations.md deleted file mode 100644 index 937dc4bce..000000000 --- a/supported_formats_and_operations.md +++ /dev/null @@ -1,60 +0,0 @@ -# Weight Formats - -The supported weight formats are listed below. In addition to `source` and `sha256` which will be required for all formats, some format may contain additional fields. - -- `keras_hdf5`: A hdf5 file containing weights for Keras. -- `pytorch_script`: A torchscript file. -- `pytorch_state_dict`: A file containg the state dict of a pytorch model. -- `tensorflow_js`: A text JSON file named model.json, which carries the topology and reference to the weights files, used by tensorflow.js. -- `tensorflow_saved_model_bundle`: A zip file containing a `pb` file and `variables` folder. Additional fields are - - `tag` - - `tensorflow_version` -- `onnx`: A Open Neural Network Exchange file - - `opset_version` - -## Consumers - -Which consumer software supports which format? - -| `weight_format` | ilastik | deepImageJ | Fiji | -| --------------------- | ------- | ---------- | ---- | -| `keras_hdf5` | No | No | ? | -| `pytorch_script` | No | Yes | No | -| `pytorch_state_dict` | Yes | No | No | -| `tensorflow_js` | No | Yes | No | -| `tensorflow_saved_model_bundle` | No | Yes | Yes | -| `onnx` | ? | ? | ? | - - -## Postprocessing - -Additional postprocessing operations. - -- `scale_range` normalize the tensor with percentile normalization - - `kwargs` - - same as preprocessing - - `reference_tensor` tensor name to compute the percentiles from. Default: The tensor itself. If `mode`==`per_dataset` this needs to be the name of an input tensor. - - `reference_implementation` -- `scale_mean_variance` scale the tensor s.t. its mean and variance match a reference tensor - - `kwargs` - - `mode` one of `per_dataset` or `per_sample` (for fixed mean and variance use `scale_linear`) - - `reference_tensor` name of tensor to match - - `reference_implementation` - - -### Consumers - -Which consumer supports which postprocessing operation? - -| postprocesing | ilastik | deepImageJ | Fiji | -| --------------------- | ------- | ---------- | ---- | -| `scale_mean_variance` | ? | ? | ? | -| `scale_range` | ? | ? | ? | - - -# Run Modes - -Custom run modes to enable more complex prediction procedures. 
- - -## Consumers diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/conftest.py b/tests/conftest.py index 92afd6d74..3e8f5f33d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,113 +1,79 @@ -import pathlib - -import pytest - - -@pytest.fixture -def unet2d_nuclei_broad_base_path(): - return pathlib.Path(__file__).parent / "../example_specs/models/unet2d_nuclei_broad" - - -def get_unet2d_nuclei_broad(unet2d_nuclei_broad_base_path, request) -> dict: - if request.param == "v0_4_9": - v = "" - else: - v = f"_{request.param}" - - f_name = f"rdf{v}.yaml" - return unet2d_nuclei_broad_base_path / f_name - - -@pytest.fixture(params=["v0_3_0", "v0_3_1", "v0_3_2", "v0_3_3", "v0_3_6", "v0_4_0", "v0_4_9"]) -def unet2d_nuclei_broad_any(unet2d_nuclei_broad_base_path, request): - yield get_unet2d_nuclei_broad(unet2d_nuclei_broad_base_path, request) - - -@pytest.fixture(params=["v0_3_0", "v0_3_1", "v0_3_2", "v0_3_3", "v0_3_6", "v0_4_0"]) -def unet2d_nuclei_broad_before_latest(unet2d_nuclei_broad_base_path, request): - yield get_unet2d_nuclei_broad(unet2d_nuclei_broad_base_path, request) - - -@pytest.fixture(params=["v0_4_9"]) -def unet2d_nuclei_broad_latest(unet2d_nuclei_broad_base_path, request): - yield get_unet2d_nuclei_broad(unet2d_nuclei_broad_base_path, request) - - -@pytest.fixture(params=["v0_3_6", "v0_4_9"]) -def unet2d_nuclei_broad_any_minor(unet2d_nuclei_broad_base_path, request): - yield get_unet2d_nuclei_broad(unet2d_nuclei_broad_base_path, request) - - -@pytest.fixture -def invalid_rdf_v0_4_0_duplicate_tensor_names(unet2d_nuclei_broad_base_path): - return unet2d_nuclei_broad_base_path / "invalid_rdf_v0_4_0_duplicate_tensor_names.yaml" - - -@pytest.fixture -def unet2d_nuclei_broad_collection(): - return pathlib.Path(__file__).parent / "../example_specs/collections/unet2d_nuclei_broad_coll/rdf.yaml" - - -@pytest.fixture -def partner_collection(): - return pathlib.Path(__file__).parent / "../example_specs/collections/partner_collection/rdf.yaml" - - -@pytest.fixture -def unet2d_nuclei_broad_url(): - return "https://raw.githubusercontent.com/bioimage-io/spec-bioimage-io/main/example_specs/models/unet2d_nuclei_broad/rdf.yaml" - - -@pytest.fixture -def FruNet_model_url(): - return "https://raw.githubusercontent.com/deepimagej/models/master/fru-net_sev_segmentation/model.yaml" - - -@pytest.fixture -def unet2d_diff_output_shape(): - return pathlib.Path(__file__).parent / "../example_specs/models/unet2d_diff_output_shape/rdf.yaml" - - -@pytest.fixture -def unet2d_fixed_shape(): - return pathlib.Path(__file__).parent / "../example_specs/models/unet2d_fixed_shape/rdf.yaml" - - -@pytest.fixture -def unet2d_multi_tensor(): - return pathlib.Path(__file__).parent / "../example_specs/models/unet2d_multi_tensor/rdf.yaml" - - -@pytest.fixture -def unet2d_expanded_output_shape(): - return pathlib.Path(__file__).parent / "../example_specs/models/unet2d_nuclei_broad/rdf_expand_output_shape.yaml" - - -@pytest.fixture -def hpa_model(): - return pathlib.Path(__file__).parent / "../example_specs/models/hpa-densenet/rdf.yaml" - - -@pytest.fixture -def stardist_model(): - return pathlib.Path(__file__).parent / "../example_specs/models/stardist_example_model/rdf.yaml" - - -@pytest.fixture -def unet2d_keras_tf(): - return pathlib.Path(__file__).parent / "../example_specs/models/unet2d_keras_tf/rdf.yaml" - - -@pytest.fixture -def unet2d_keras_tf2(): - return pathlib.Path(__file__).parent / "../example_specs/models/unet2d_keras_tf2/rdf.yaml" - - 
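The parametrized fixtures in this removed conftest.py all share one indirection: `pytest.fixture(params=[...])` hands each format version string to the fixture via `request.param`, which `get_unet2d_nuclei_broad` maps to an `rdf*.yaml` path. A self-contained sketch of that pattern, with illustrative names only:

import pytest

@pytest.fixture(params=["v0_3_6", "v0_4_0"])  # one test invocation per version
def rdf_file_name(request: pytest.FixtureRequest) -> str:
    # request.param carries the current parameter value
    return f"rdf_{request.param}.yaml"

def test_versioned_name(rdf_file_name: str):
    assert rdf_file_name.startswith("rdf_v0_")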
-@pytest.fixture -def dataset_rdf(): - return pathlib.Path(__file__).parent / "../example_specs/datasets/covid_if_training_data/rdf.yaml" - - -@pytest.fixture -def upsamle_model_rdf(): - return pathlib.Path(__file__).parent / "../example_specs/models/upsample_test_model/rdf.yaml" +import json +from pathlib import Path +from pprint import pprint +from types import MappingProxyType +from typing import Any, Dict, Union + +import pytest +from filelock import FileLock +from ruyaml import YAML + +from bioimageio.spec._internal.constants import ( + KNOWN_GH_USERS, + KNOWN_INVALID_GH_USERS, + N_KNOWN_GH_USERS, + N_KNOWN_INVALID_GH_USERS, +) + +yaml = YAML(typ="safe") + +EXAMPLE_SPECS = Path(__file__).parent / "../example_descriptions/" +UNET2D_ROOT = EXAMPLE_SPECS / "models/unet2d_nuclei_broad" + + +@pytest.fixture(scope="session") +def bioimageio_json_schema( + tmp_path_factory: pytest.TempPathFactory, worker_id: str +) -> Dict[Any, Any]: + """generates json schema (only run with one worker) + see https://pytest-xdist.readthedocs.io/en/latest/how-to.html#making-session-scoped-fixtures-execute-only-once + """ + from scripts.generate_json_schemas import generate_json_schemas + + root_tmp_dir = tmp_path_factory.getbasetemp().parent + path = root_tmp_dir / "bioimageio_schema_latest.json" + if worker_id == "master": + # no workers + generate_json_schemas(root_tmp_dir, "generate") + schema: Union[Any, Dict[Any, Any]] = json.loads(path.read_text()) + else: + with FileLock(path.with_suffix(path.suffix + ".lock")): + if not path.is_file(): + generate_json_schemas(root_tmp_dir, "generate") + + schema: Union[Any, Dict[Any, Any]] = json.loads(path.read_text()) + + assert isinstance(schema, dict) + return schema + + +@pytest.fixture(scope="session") +def stardist04_data(): + with ( + EXAMPLE_SPECS / "models/stardist_example_model/v0_4.bioimageio.yaml" + ).open() as f: + return MappingProxyType(yaml.load(f)) + + +@pytest.fixture(scope="session") +def unet2d_path() -> Path: + return UNET2D_ROOT / "bioimageio.yaml" + + +@pytest.fixture(scope="session") +def unet2d_data(unet2d_path: Path): + with unet2d_path.open() as f: + data: Union[Any, Dict[Any, Any]] = yaml.load(f) + + assert isinstance(data, dict) + return MappingProxyType(data) + + +def pytest_sessionfinish(session: Any, exitstatus: Any): + if len(KNOWN_GH_USERS) > N_KNOWN_GH_USERS: + print("updated known gh users:") + pprint(KNOWN_GH_USERS) + + if len(KNOWN_INVALID_GH_USERS) > N_KNOWN_INVALID_GH_USERS: + print("updated known invalid gh users:") + pprint(KNOWN_INVALID_GH_USERS) diff --git a/tests/test_bioimageio_collection.py b/tests/test_bioimageio_collection.py new file mode 100644 index 000000000..3baae018a --- /dev/null +++ b/tests/test_bioimageio_collection.py @@ -0,0 +1,266 @@ +import datetime +import json +from pathlib import Path +from typing import Any, Dict, Iterable, Mapping + +import pooch +import pytest + +from bioimageio.spec._description import DISCOVER, LATEST +from bioimageio.spec._internal.types import FormatVersionPlaceholder +from tests.utils import ParameterSet, check_bioimageio_yaml + +BASE_URL = "https://bioimage-io.github.io/collection-bioimage-io/" +RDF_BASE_URL = BASE_URL + "rdfs/" +WEEK = f"{datetime.datetime.now().year}week{datetime.datetime.now().isocalendar()[1]}" +CACHE_PATH = Path(__file__).parent / "cache" / WEEK + + +KNOWN_INVALID = { + "10.5281/zenodo.5749843/5888237/rdf.yaml", + "10.5281/zenodo.5910163/5942853/rdf.yaml", + "10.5281/zenodo.5910854/6539073/rdf.yaml", + "10.5281/zenodo.5914248/6514622/rdf.yaml", + 
"10.5281/zenodo.6559929/6559930/rdf.yaml", + "10.5281/zenodo.7614645/7642674/rdf.yaml", + "biapy/biapy/latest/rdf.yaml", + "biapy/notebook_classification_2d/latest/rdf.yaml", + "biapy/Notebook_semantic_segmentation_3d/latest/rdf.yaml", + "deepimagej/deepimagej/latest/rdf.yaml", + "deepimagej/DeepSTORMZeroCostDL4Mic/latest/rdf.yaml", + "deepimagej/Mt3VirtualStaining/latest/rdf.yaml", + "deepimagej/MU-Lux_CTC_PhC-C2DL-PSC/latest/rdf.yaml", + "deepimagej/SkinLesionClassification/latest/rdf.yaml", + "deepimagej/SMLMDensityMapEstimationDEFCoN/latest/rdf.yaml", + "deepimagej/UNet2DGlioblastomaSegmentation/latest/rdf.yaml", + "deepimagej/WidefieldDapiSuperResolution/latest/rdf.yaml", + "deepimagej/WidefieldFitcSuperResolution/latest/rdf.yaml", + "deepimagej/WidefieldTxredSuperResolution/latest/rdf.yaml", + "fiji/N2VSEMDemo/latest/rdf.yaml", + "ilastik/mitoem_segmentation_challenge/latest/rdf.yaml", + "imjoy/LuCa-7color/latest/rdf.yaml", + "zero/Dataset_CARE_2D_coli_DeepBacs/latest/rdf.yaml", + "zero/Dataset_fnet_DeepBacs/latest/rdf.yaml", + "zero/Dataset_Noise2Void_2D_subtilis_DeepBacs/latest/rdf.yaml", + "zero/Dataset_SplineDist_2D_DeepBacs/latest/rdf.yaml", + "zero/Dataset_StarDist_2D_DeepBacs/latest/rdf.yaml", + "zero/Dataset_U-Net_2D_DeepBacs/latest/rdf.yaml", + "zero/Dataset_U-Net_2D_multilabel_DeepBacs/latest/rdf.yaml", + "zero/Dataset_YOLOv2_antibiotic_DeepBacs/latest/rdf.yaml", + "zero/Dataset_YOLOv2_coli_DeepBacs/latest/rdf.yaml", + "zero/Notebook_CycleGAN_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_DecoNoising_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_Detectron2_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_DRMIME_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_EmbedSeg_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_MaskRCNN_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_pix2pix_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_RetinaNet_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_StarDist_3D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_U-Net_2D_multilabel_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_U-Net_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_U-Net_3D_ZeroCostDL4Mic/latest/rdf.yaml", +} +KNOWN_INVALID_AS_LATEST = { + "10.5281/zenodo.5749843/5888237/rdf.yaml", + "10.5281/zenodo.5874841/6630266/rdf.yaml", + "10.5281/zenodo.5910163/5942853/rdf.yaml", + "10.5281/zenodo.5914248/6514622/rdf.yaml", + "10.5281/zenodo.5914248/8186255/rdf.yaml", + "10.5281/zenodo.6383429/7774505/rdf.yaml", + "10.5281/zenodo.6406803/6406804/rdf.yaml", + "10.5281/zenodo.6559474/6559475/rdf.yaml", + "10.5281/zenodo.6559929/6559930/rdf.yaml", + "10.5281/zenodo.6811491/6811492/rdf.yaml", + "10.5281/zenodo.6865412/6919253/rdf.yaml", + "10.5281/zenodo.7380171/7405349/rdf.yaml", + "10.5281/zenodo.7614645/7642674/rdf.yaml", + "10.5281/zenodo.8401064/8429203/rdf.yaml", + "10.5281/zenodo.8421755/8432366/rdf.yaml", + "biapy/biapy/latest/rdf.yaml", + "biapy/notebook_classification_2d/latest/rdf.yaml", + "biapy/notebook_classification_3d/latest/rdf.yaml", + "biapy/notebook_denoising_2d/latest/rdf.yaml", + "biapy/notebook_denoising_3d/latest/rdf.yaml", + "biapy/notebook_detection_2d/latest/rdf.yaml", + "biapy/notebook_detection_3d/latest/rdf.yaml", + "biapy/notebook_instance_segmentation_2d/latest/rdf.yaml", + "biapy/notebook_instance_segmentation_3d/latest/rdf.yaml", + "biapy/notebook_self_supervision_2d/latest/rdf.yaml", + "biapy/notebook_self_supervision_3d/latest/rdf.yaml", + "biapy/notebook_semantic_segmentation_2d/latest/rdf.yaml", + 
"biapy/Notebook_semantic_segmentation_3d/latest/rdf.yaml", + "biapy/notebook_super_resolution_2d/latest/rdf.yaml", + "biapy/notebook_super_resolution_3d/latest/rdf.yaml", + "bioimageio/stardist/latest/rdf.yaml", + "deepimagej/deepimagej-web/latest/rdf.yaml", + "deepimagej/deepimagej/latest/rdf.yaml", + "deepimagej/DeepSTORMZeroCostDL4Mic/latest/rdf.yaml", + "deepimagej/DeepSTORMZeroCostDL4Mic/latest/rdf.yaml", + "deepimagej/DeepSTORMZeroCostDL4Mic/latest/rdf.yaml", + "deepimagej/DeepSTORMZeroCostDL4Mic/latest/rdf.yaml", + "deepimagej/EVsTEMsegmentationFRUNet/latest/rdf.yaml", + "deepimagej/MoNuSeg_digital_pathology_miccai2018/latest/rdf.yaml", + "deepimagej/Mt3VirtualStaining/latest/rdf.yaml", + "deepimagej/MU-Lux_CTC_PhC-C2DL-PSC/latest/rdf.yaml", + "deepimagej/SkinLesionClassification/latest/rdf.yaml", + "deepimagej/smlm-deepimagej/latest/rdf.yaml", + "deepimagej/SMLMDensityMapEstimationDEFCoN/latest/rdf.yaml", + "deepimagej/unet-pancreaticcellsegmentation/latest/rdf.yaml", + "deepimagej/UNet2DGlioblastomaSegmentation/latest/rdf.yaml", + "deepimagej/WidefieldDapiSuperResolution/latest/rdf.yaml", + "deepimagej/WidefieldFitcSuperResolution/latest/rdf.yaml", + "deepimagej/WidefieldTxredSuperResolution/latest/rdf.yaml", + "dl4miceverywhere/DL4MicEverywhere/latest/rdf.yaml", + "dl4miceverywhere/Notebook_bioimageio_pytorch/latest/rdf.yaml", + "dl4miceverywhere/Notebook_bioimageio_tensorflow/latest/rdf.yaml", + "fiji/Fiji/latest/rdf.yaml", + "hpa/HPA-Classification/latest/rdf.yaml", + "hpa/hpa-kaggle-2021-dataset/latest/rdf.yaml", + "icy/icy/latest/rdf.yaml", + "ilastik/arabidopsis_tissue_atlas/latest/rdf.yaml", + "ilastik/cremi_training_data/latest/rdf.yaml", + "ilastik/ilastik/latest/rdf.yaml", + "ilastik/isbi2012_neuron_segmentation_challenge/latest/rdf.yaml", + "ilastik/mitoem_segmentation_challenge/latest/rdf.yaml", + "ilastik/mws-segmentation/latest/rdf.yaml", + "imjoy/BioImageIO-Packager/latest/rdf.yaml", + "imjoy/GenericBioEngineApp/latest/rdf.yaml", + "imjoy/HPA-Single-Cell/latest/rdf.yaml", + "imjoy/ImageJ.JS/latest/rdf.yaml", + "imjoy/ImJoy/latest/rdf.yaml", + "imjoy/LuCa-7color/latest/rdf.yaml", + "imjoy/vizarr/latest/rdf.yaml", + "qupath/QuPath/latest/rdf.yaml", + "stardist/stardist/latest/rdf.yaml", + "zero/Dataset_CARE_2D_coli_DeepBacs/latest/rdf.yaml", + "zero/Dataset_CARE_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Dataset_CARE_3D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Dataset_CycleGAN_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Dataset_Deep-STORM_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Dataset_fnet_3D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Dataset_fnet_DeepBacs/latest/rdf.yaml", + "zero/Dataset_Noise2Void_2D_subtilis_DeepBacs/latest/rdf.yaml", + "zero/Dataset_Noise2Void_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Dataset_Noise2Void_3D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Dataset_Noisy_Nuclei_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Dataset_pix2pix_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Dataset_SplineDist_2D_DeepBacs/latest/rdf.yaml", + "zero/Dataset_StarDist_2D_DeepBacs/latest/rdf.yaml", + "zero/Dataset_StarDist_2D_ZeroCostDL4Mic_2D/latest/rdf.yaml", + "zero/Dataset_StarDist_brightfield_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Dataset_StarDist_brightfield2_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Dataset_StarDist_Fluo_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Dataset_StarDist_fluo2_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Dataset_U-Net_2D_DeepBacs/latest/rdf.yaml", + "zero/Dataset_U-Net_2D_multilabel_DeepBacs/latest/rdf.yaml", + 
"zero/Dataset_YOLOv2_antibiotic_DeepBacs/latest/rdf.yaml", + "zero/Dataset_YOLOv2_coli_DeepBacs/latest/rdf.yaml", + "zero/Dataset_YOLOv2_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook Preview/latest/rdf.yaml", + "zero/Notebook_Augmentor_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_CARE_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_CARE_3D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_Cellpose_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_CycleGAN_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_CycleGAN_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_DecoNoising_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_DecoNoising_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_Deep-STORM_2D_ZeroCostDL4Mic_DeepImageJ/latest/rdf.yaml", + "zero/Notebook_Deep-STORM_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_DenoiSeg_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_Detectron2_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_Detectron2_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_DFCAN_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_DRMIME_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_DRMIME_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_EmbedSeg_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_EmbedSeg_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_fnet_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_fnet_3D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_Interactive_Segmentation_Kaibu_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_MaskRCNN_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_MaskRCNN_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_Noise2Void_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_Noise2Void_3D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_pix2pix_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_pix2pix_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/notebook_preview/latest/rdf.yaml-latest", + "zero/notebook_preview/latest/rdf.yaml", + "zero/Notebook_Quality_Control_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_RCAN_3D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_RetinaNet_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_RetinaNet_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_SplineDist_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_StarDist_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_StarDist_3D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_StarDist_3D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_U-Net_2D_multilabel_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_U-Net_2D_multilabel_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_U-Net_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_U-Net_2D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_U-Net_3D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_U-Net_3D_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/Notebook_YOLOv2_ZeroCostDL4Mic/latest/rdf.yaml", + "zero/WGAN_ZeroCostDL4Mic.ipynb/latest/rdf.yaml", +} +EXCLUDE_FIELDS_FROM_ROUNDTRIP = { + "10.5281/zenodo.6348728/6348729/rdf.yaml": {"cite"}, # doi prefixed + "10.5281/zenodo.6406803/6406804/rdf.yaml": {"cite"}, # doi prefixed + "10.5281/zenodo.6338614/6338615/rdf.yaml": {"cite"}, # doi prefixed + "10.5281/zenodo.5914248/8186255/rdf.yaml": {"cite"}, # doi prefixed + "10.5281/zenodo.7274275/8123818/rdf.yaml": {"inputs", "parent"}, + "10.5281/zenodo.7315440/7315441/rdf.yaml": { + "cite", + "maintainers", + "weights", + }, # weights.onnx: missing sh256, cite[0].doi: prefix + "10.5281/zenodo.7772662/7781091/rdf.yaml": { + "weights" + }, 
# upper to lower case sha256 + "10.5281/zenodo.6028097/6028098/rdf.yaml": { + "authors", # gh username "Constantin Pape" -> contantinpape + "maintainers", + }, + "zero/Notebook Preview/latest/rdf.yaml": {"rdf_source"}, # ' ' -> %20 +} + + +def yield_bioimageio_yaml_urls() -> Iterable[ParameterSet]: + collection_path: Any = pooch.retrieve(BASE_URL + "collection.json", None) + with Path(collection_path).open(encoding="utf-8") as f: + collection_data = json.load(f)["collection"] + + collection_registry: Dict[str, None] = { + entry["rdf_source"].replace(RDF_BASE_URL, ""): None for entry in collection_data + } + + for rdf in collection_registry: + descr_url = RDF_BASE_URL + rdf + key = rdf + yield pytest.param(descr_url, key, id=key) + + +@pytest.mark.parametrize("format_version", [DISCOVER, LATEST]) +@pytest.mark.parametrize("descr_url,key", list(yield_bioimageio_yaml_urls())) +def test_rdf( + descr_url: Path, + key: str, + format_version: FormatVersionPlaceholder, + bioimageio_json_schema: Mapping[Any, Any], +): + if ( + format_version == DISCOVER + and key in KNOWN_INVALID + or format_version == LATEST + and key in KNOWN_INVALID_AS_LATEST + ): + pytest.skip("known failure") + + check_bioimageio_yaml( + descr_url, + as_latest=format_version == LATEST, + exclude_fields_from_roundtrip=EXCLUDE_FIELDS_FROM_ROUNDTRIP.get(key, set()), + bioimageio_json_schema=bioimageio_json_schema, + perform_io_checks=False, + ) diff --git a/tests/test_cli.py b/tests/test_cli.py deleted file mode 100644 index b826198a9..000000000 --- a/tests/test_cli.py +++ /dev/null @@ -1,111 +0,0 @@ -import os -import subprocess -import zipfile -from typing import Sequence - -import pytest - -from bioimageio.spec.io_ import ( - load_raw_resource_description, - save_raw_resource_description, - serialize_raw_resource_description, -) -from bioimageio.spec.shared import yaml - -SKIP_ZENODO = True -SKIP_ZENODO_REASON = "zenodo api changes" - - -def run_subprocess(commands: Sequence[str], **kwargs) -> subprocess.CompletedProcess: - return subprocess.run(commands, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, encoding="utf-8", **kwargs) - - -def test_cli_validate_model(unet2d_nuclei_broad_latest): - ret = run_subprocess(["bioimageio", "validate", str(unet2d_nuclei_broad_latest)]) - assert ret.returncode == 0 - - -def test_cli_validate_model_url(): - ret = run_subprocess( - [ - "bioimageio", - "validate", - "https://raw.githubusercontent.com/bioimage-io/spec-bioimage-io/main/example_specs/models/unet2d_nuclei_broad/rdf.yaml", - ] - ) - assert ret.returncode == 0 - - -def test_cli_validate_model_url_wo_cache(): - env = os.environ.copy() - env["BIOIMAGEIO_USE_CACHE"] = "false" - ret = run_subprocess( - [ - "bioimageio", - "validate", - "https://raw.githubusercontent.com/bioimage-io/spec-bioimage-io/main/example_specs/models/unet2d_nuclei_broad/rdf.yaml", - ], - env=env, - ) - assert ret.returncode == 0 - - -@pytest.mark.skipif(SKIP_ZENODO, reason=SKIP_ZENODO_REASON) -def test_cli_validate_model_doi(): - ret = run_subprocess(["bioimageio", "validate", "10.5281/zenodo.5744489"]) - assert ret.returncode == 0 - - -def test_cli_validate_model_package(unet2d_nuclei_broad_latest, tmpdir): - zf_path = tmpdir / "package.zip" - - # load from path and serialize with absolute paths - raw_rd = load_raw_resource_description(unet2d_nuclei_broad_latest) - rdf_str = serialize_raw_resource_description(raw_rd, convert_absolute_paths=False) - - with zipfile.ZipFile(zf_path, "w") as zf: - zf.writestr("rdf.yaml", rdf_str) - - ret = 
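`test_rdf` above parametrizes over every entry of the online collection at collection time. The general pattern (fetch a JSON registry once with pooch, then emit one `pytest.param` per entry with a readable id) can be sketched like this; the test body here is illustrative only:

```python
# Hedged sketch of parametrizing tests from a remote JSON registry,
# analogous to yield_bioimageio_yaml_urls above.
import json
from pathlib import Path
from typing import Any, Iterable

import pooch
import pytest

BASE_URL = "https://bioimage-io.github.io/collection-bioimage-io/"


def yield_rdf_urls() -> Iterable[Any]:
    # download once (cached by pooch); known_hash=None skips checksum verification
    local = Path(pooch.retrieve(BASE_URL + "collection.json", known_hash=None))
    for entry in json.loads(local.read_text(encoding="utf-8"))["collection"]:
        url = entry["rdf_source"]
        yield pytest.param(url, id=url.rsplit("rdfs/", 1)[-1])


@pytest.mark.parametrize("url", list(yield_rdf_urls()))
def test_points_at_yaml(url: str):
    assert url.endswith((".yaml", ".yml"))
```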
diff --git a/tests/test_cli.py b/tests/test_cli.py
deleted file mode 100644
index b826198a9..000000000
--- a/tests/test_cli.py
+++ /dev/null
@@ -1,111 +0,0 @@
-import os
-import subprocess
-import zipfile
-from typing import Sequence
-
-import pytest
-
-from bioimageio.spec.io_ import (
-    load_raw_resource_description,
-    save_raw_resource_description,
-    serialize_raw_resource_description,
-)
-from bioimageio.spec.shared import yaml
-
-SKIP_ZENODO = True
-SKIP_ZENODO_REASON = "zenodo api changes"
-
-
-def run_subprocess(commands: Sequence[str], **kwargs) -> subprocess.CompletedProcess:
-    return subprocess.run(commands, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, encoding="utf-8", **kwargs)
-
-
-def test_cli_validate_model(unet2d_nuclei_broad_latest):
-    ret = run_subprocess(["bioimageio", "validate", str(unet2d_nuclei_broad_latest)])
-    assert ret.returncode == 0
-
-
-def test_cli_validate_model_url():
-    ret = run_subprocess(
-        [
-            "bioimageio",
-            "validate",
-            "https://raw.githubusercontent.com/bioimage-io/spec-bioimage-io/main/example_specs/models/unet2d_nuclei_broad/rdf.yaml",
-        ]
-    )
-    assert ret.returncode == 0
-
-
-def test_cli_validate_model_url_wo_cache():
-    env = os.environ.copy()
-    env["BIOIMAGEIO_USE_CACHE"] = "false"
-    ret = run_subprocess(
-        [
-            "bioimageio",
-            "validate",
-            "https://raw.githubusercontent.com/bioimage-io/spec-bioimage-io/main/example_specs/models/unet2d_nuclei_broad/rdf.yaml",
-        ],
-        env=env,
-    )
-    assert ret.returncode == 0
-
-
-@pytest.mark.skipif(SKIP_ZENODO, reason=SKIP_ZENODO_REASON)
-def test_cli_validate_model_doi():
-    ret = run_subprocess(["bioimageio", "validate", "10.5281/zenodo.5744489"])
-    assert ret.returncode == 0
-
-
-def test_cli_validate_model_package(unet2d_nuclei_broad_latest, tmpdir):
-    zf_path = tmpdir / "package.zip"
-
-    # load from path and serialize with absolute paths
-    raw_rd = load_raw_resource_description(unet2d_nuclei_broad_latest)
-    rdf_str = serialize_raw_resource_description(raw_rd, convert_absolute_paths=False)
-
-    with zipfile.ZipFile(zf_path, "w") as zf:
-        zf.writestr("rdf.yaml", rdf_str)
-
-    ret = run_subprocess(["bioimageio", "validate", str(zf_path)])
-    assert ret.returncode == 0
-
-
-def test_cli_validate_model_package_wo_cache(unet2d_nuclei_broad_latest, tmpdir):
-    env = os.environ.copy()
-    env["BIOIMAGEIO_USE_CACHE"] = "false"
-
-    # load from path and serialize with absolute paths
-    raw_rd = load_raw_resource_description(unet2d_nuclei_broad_latest)
-    rdf_str = serialize_raw_resource_description(raw_rd, convert_absolute_paths=False)
-
-    zf_path = tmpdir / "package.zip"
-    with zipfile.ZipFile(zf_path, "w") as zf:
-        zf.writestr("rdf.yaml", rdf_str)
-
-    ret = run_subprocess(["bioimageio", "validate", str(zf_path)], env=env)
-    assert ret.returncode == 0
-
-
-def test_cli_update_format(unet2d_nuclei_broad_before_latest, tmp_path):
-    in_path = tmp_path / "rdf.yaml"
-    save_raw_resource_description(load_raw_resource_description(unet2d_nuclei_broad_before_latest), in_path)
-    assert in_path.exists()
-    path = tmp_path / "rdf_new.yaml"
-    ret = run_subprocess(["bioimageio", "update-format", str(in_path), str(path)])
-    assert ret.returncode == 0
-    assert path.exists()
-
-
-def test_update_rdf(unet2d_nuclei_broad_base_path, tmp_path):
-    in_path = unet2d_nuclei_broad_base_path / "rdf.yaml"
-    assert in_path.exists()
-    update_path = tmp_path / "update.yaml"
-    assert yaml is not None
-    yaml.dump(dict(name="updated", outputs=[{"name": "updated", "halo": ["KEEP", "DROP", 0, 9, 9]}]), update_path)
-    out_path = tmp_path / "output.yaml"
-    ret = run_subprocess(["bioimageio", "update-rdf", str(in_path), str(update_path), str(out_path)])
-    assert ret.returncode == 0
-    actual = yaml.load(out_path)
-    assert actual["name"] == "updated"
-    assert actual["outputs"][0]["name"] == "updated"
-    assert actual["outputs"][0]["halo"] == [0, 0, 9, 9]
diff --git a/tests/test_commands.py b/tests/test_commands.py
deleted file mode 100644
index 9bc483407..000000000
--- a/tests/test_commands.py
+++ /dev/null
@@ -1,267 +0,0 @@
-import zipfile
-from io import BytesIO, StringIO
-
-import pytest
-
-from bioimageio.spec import (
-    load_raw_resource_description,
-    serialize_raw_resource_description,
-    serialize_raw_resource_description_to_dict,
-)
-from bioimageio.spec.model import format_version, raw_nodes
-from bioimageio.spec.shared import yaml
-
-SKIP_ZENODO = False
-SKIP_ZENODO_REASON = "zenodo api changes"
-
-
-def test_validate_dataset(dataset_rdf):
-    from bioimageio.spec.commands import validate
-
-    summary = validate(dataset_rdf, update_format=True, update_format_inner=False)
-    assert summary["status"] == "passed", summary
-    summary = validate(dataset_rdf, update_format=False, update_format_inner=False)
-    assert summary["status"] == "passed", summary
-
-
-def test_validate_model_as_dict(unet2d_nuclei_broad_any):
-    from bioimageio.spec.commands import validate
-
-    assert not validate(unet2d_nuclei_broad_any, update_format=True, update_format_inner=False)["error"]
-    assert not validate(unet2d_nuclei_broad_any, update_format=False, update_format_inner=False)["error"]
-
-
-def test_validate_model_as_url():
-    from bioimageio.spec.commands import validate
-
-    assert not validate(
-        "https://raw.githubusercontent.com/bioimage-io/spec-bioimage-io/main/example_specs/models/unet2d_nuclei_broad/rdf.yaml",
-        update_format=True,
-        update_format_inner=False,
-    )["error"]
-    assert not validate(
-        "https://raw.githubusercontent.com/bioimage-io/spec-bioimage-io/main/example_specs/models/unet2d_nuclei_broad/rdf.yaml",
-        update_format=False,
-        update_format_inner=False,
-    )["error"]
-
-
-@pytest.mark.skipif(SKIP_ZENODO, reason=SKIP_ZENODO_REASON)
-def test_validate_model_as_zenodo_doi():
-    from bioimageio.spec.commands import validate
-
-    doi = "10.5281/zenodo.5744490"
-    assert not validate(doi, update_format=False, update_format_inner=False)["error"]
-
-    # expecting UnconvertibleError due to missing sha256
-    assert validate(doi, update_format=True, update_format_inner=False)["error"]
-
-
-def test_validate_model_as_bioimageio_full_version_id_partner():
-    from bioimageio.spec.commands import validate
-
-    full_version_id = "ilastik/isbi2012_neuron_segmentation_challenge/latest"
-    summary = validate(full_version_id, update_format=False, update_format_inner=False)
-    assert summary["status"] == "passed", summary["error"]
-
-
-@pytest.mark.skipif(SKIP_ZENODO, reason=SKIP_ZENODO_REASON)
-def test_validate_model_as_bioimageio_full_version_id_zenodo():
-    from bioimageio.spec.commands import validate
-
-    full_version_id = "10.5281/zenodo.5874741/5874742"
-    summary = validate(full_version_id, update_format=False, update_format_inner=False)
-    assert summary["status"] == "passed", summary["error"]
-
-
-def test_validate_model_as_bioimageio_resource_id_partner():
-    from bioimageio.spec.commands import validate
-
-    resource_id = "ilastik/isbi2012_neuron_segmentation_challenge"
-    summary = validate(resource_id, update_format=False, update_format_inner=False)
-    assert summary["status"] == "passed", summary["error"]
-
-
-@pytest.mark.skipif(SKIP_ZENODO, reason=SKIP_ZENODO_REASON)
-def test_validate_model_as_bioimageio_resource_id_zenodo():
-    from bioimageio.spec.commands import validate
-
-    resource_id = "10.5281/zenodo.5874741"
-    summary = validate(resource_id, update_format=False, update_format_inner=False)
-    assert summary["status"] == "passed", summary["error"]
-
-
-def test_validate_model_as_bytes_io(unet2d_nuclei_broad_latest):
-    from bioimageio.spec.commands import validate
-
-    # load from path and serialize with absolute paths
-    raw_rd = load_raw_resource_description(unet2d_nuclei_broad_latest)
-    data_str = serialize_raw_resource_description(raw_rd, convert_absolute_paths=False)
-
-    data = BytesIO(data_str.encode("utf-8"))
-    data.seek(0)
-    assert not validate(data, update_format=True, update_format_inner=False)["error"]
-    data.seek(0)
-    assert not validate(data, update_format=False, update_format_inner=False)["error"]
-
-
-def test_validate_model_as_string_io(unet2d_nuclei_broad_latest):
-    from bioimageio.spec.commands import validate
-
-    # load from path and serialize with absolute paths
-    raw_rd = load_raw_resource_description(unet2d_nuclei_broad_latest)
-    data_str = serialize_raw_resource_description(raw_rd, convert_absolute_paths=False)
-
-    data = StringIO(data_str)
-    data.seek(0)
-    assert not validate(data, update_format=True, update_format_inner=False)["error"]
-    data.seek(0)
-    assert not validate(data, update_format=False, update_format_inner=False)["error"]
-
-
-def test_validate_model_as_bytes(unet2d_nuclei_broad_latest):
-    from bioimageio.spec.commands import validate
-
-    # load from path and serialize with absolute paths
-    raw_rd = load_raw_resource_description(unet2d_nuclei_broad_latest)
-    data_str = serialize_raw_resource_description(raw_rd, convert_absolute_paths=False)
-
-    data = data_str.encode("utf-8")
-    assert not validate(data, update_format=True, update_format_inner=False)["error"]
-    assert not validate(data, update_format=False, update_format_inner=False)["error"]
-
-
-def test_validate_model_as_string(unet2d_nuclei_broad_latest):
-    from bioimageio.spec.commands import validate
-
-    # load from path and serialize with absolute paths
-    raw_rd = load_raw_resource_description(unet2d_nuclei_broad_latest)
-    data = serialize_raw_resource_description(raw_rd, convert_absolute_paths=False)
-
-    assert not validate(data, update_format=True, update_format_inner=False)["error"]
-    assert not validate(data, update_format=False, update_format_inner=False)["error"]
-
-
-def test_validate_model_package_as_bytes(unet2d_nuclei_broad_latest):
-    from bioimageio.spec.commands import validate
-
-    # load from path and serialize with absolute paths
-    raw_rd = load_raw_resource_description(unet2d_nuclei_broad_latest)
-    rdf_str = serialize_raw_resource_description(raw_rd, convert_absolute_paths=False)
-
-    data = BytesIO()
-    with zipfile.ZipFile(data, "w") as zf:
-        zf.writestr("rdf.yaml", rdf_str)
-
-    data.seek(0)
-    assert not validate(data, update_format=True, update_format_inner=False)["error"]
-    data.seek(0)
-    assert not validate(data, update_format=False, update_format_inner=False)["error"]
-
-
-def test_validate_model_package_on_disk(unet2d_nuclei_broad_latest, tmpdir):
-    from bioimageio.spec.commands import validate
-
-    # load from path and serialize with absolute paths
-    raw_rd = load_raw_resource_description(unet2d_nuclei_broad_latest)
-    rdf_str = serialize_raw_resource_description(raw_rd, convert_absolute_paths=False)
-
-    zf_path = tmpdir / "package.zip"
-    with zipfile.ZipFile(zf_path, "w") as zf:
-        zf.writestr("rdf.yaml", rdf_str)
-
-    assert not validate(zf_path, update_format=True, update_format_inner=False)["error"]
-    assert not validate(zf_path, update_format=False, update_format_inner=False)["error"]
-
-
-def test_validate_invalid_model(unet2d_nuclei_broad_latest):
-    from bioimageio.spec.commands import validate
-
-    # load from path and serialize with absolute paths
-    raw_rd = load_raw_resource_description(unet2d_nuclei_broad_latest)
-    data = serialize_raw_resource_description_to_dict(raw_rd)
-
-    del data["test_inputs"]  # invalidate data
-    assert validate(data, update_format=True, update_format_inner=False)["error"]
-    assert validate(data, update_format=False, update_format_inner=False)["error"]
-
-
-def test_validate_generates_warnings(unet2d_nuclei_broad_latest):
-    from bioimageio.spec.commands import validate
-
-    raw_rd = load_raw_resource_description(unet2d_nuclei_broad_latest)
-    data = serialize_raw_resource_description_to_dict(raw_rd)
-    data["license"] = "BSD-2-Clause-FreeBSD"
-    data["run_mode"] = {"name": "fancy"}
-    summary = validate(data, update_format=True, update_format_inner=False)
-
-    assert summary["warnings"]
-
-
-def test_update_format(unet2d_nuclei_broad_before_latest, tmp_path):
-    from bioimageio.spec.commands import update_format
-
-    path = tmp_path / "rdf_new.yaml"
-    update_format(unet2d_nuclei_broad_before_latest, path)
-
-    assert path.exists()
-    model = load_raw_resource_description(path)
-    assert model.format_version == format_version
-
-
-def test_update_rdf_using_paths(unet2d_nuclei_broad_latest, tmp_path):
-    from bioimageio.spec.commands import update_rdf
-
-    in_path = unet2d_nuclei_broad_latest
-    assert in_path.exists()
-    update_path = tmp_path / "update.yaml"
-    assert yaml is not None
-    yaml.dump(dict(name="updated", outputs=[{"name": "updated", "halo": ["KEEP", "DROP", 0, 9, 9]}]), update_path)
-    out_path = tmp_path / "output.yaml"
-    update_rdf(in_path, update_path, out_path)
-    actual = yaml.load(out_path)
-    assert actual["name"] == "updated"
-    assert actual["outputs"][0]["name"] == "updated"
-    assert actual["outputs"][0]["halo"] == [0, 0, 9, 9]
-
-
-def test_update_rdf_using_dicts(unet2d_nuclei_broad_latest):
-    from bioimageio.spec.commands import update_rdf
-
-    # load from path and serialize with absolute paths
-    raw_rd = load_raw_resource_description(unet2d_nuclei_broad_latest)
-    source = serialize_raw_resource_description_to_dict(raw_rd, convert_absolute_paths=False)
-
-    update = dict(name="updated", outputs=[{"name": "updated", "halo": ["KEEP", "DROP", 0, 9, 9]}])
-    actual = update_rdf(source, update)
-    assert isinstance(actual, dict)
-    assert actual["name"] == "updated"
-    assert actual["outputs"][0]["name"] == "updated"
-    assert actual["outputs"][0]["halo"] == [0, 0, 9, 9]
-
-
-def test_update_rdf_using_dicts_in_place(unet2d_nuclei_broad_latest):
-    from bioimageio.spec.commands import update_rdf
-
-    # load from path and serialize with absolute paths
-    raw_rd = load_raw_resource_description(unet2d_nuclei_broad_latest)
-    source = serialize_raw_resource_description_to_dict(raw_rd, convert_absolute_paths=False)
-
-    update = dict(name="updated", outputs=[{"name": "updated", "halo": ["KEEP", "DROP", 0, 9, 9]}])
-    update_rdf(source, update, output=source)
-    actual = source
-    assert actual["name"] == "updated"
-    assert actual["outputs"][0]["name"] == "updated"
-    assert actual["outputs"][0]["halo"] == [0, 0, 9, 9]
-
-
-def test_update_rdf_using_rd(unet2d_nuclei_broad_latest):
-    from bioimageio.spec.commands import update_rdf
-
-    source = load_raw_resource_description(unet2d_nuclei_broad_latest)
-    update = dict(name="updated", outputs=[{"name": "updated", "halo": ["KEEP", "DROP", 0, 9, 9]}])
-    actual = update_rdf(source, update)
-    assert isinstance(actual, raw_nodes.Model)
-    assert actual.name == "updated"
-    assert actual.outputs[0].name == "updated"
-    assert actual.outputs[0].halo == [0, 0, 9, 9]
diff --git a/tests/test_commands_with_collection.py b/tests/test_commands_with_collection.py
deleted file mode 100644
index ee23caa64..000000000
--- a/tests/test_commands_with_collection.py
+++ /dev/null
@@ -1,19 +0,0 @@
-from bioimageio.spec import load_raw_resource_description, serialize_raw_resource_description_to_dict
-
-
-def test_validate(unet2d_nuclei_broad_collection):
-    from bioimageio.spec.commands import validate
-
-    assert not validate(unet2d_nuclei_broad_collection, update_format=True, update_format_inner=False)["error"]
-    assert not validate(unet2d_nuclei_broad_collection, update_format=False, update_format_inner=False)["error"]
-
-
-def test_validate_invalid(unet2d_nuclei_broad_collection):
-    from bioimageio.spec.commands import validate
-
-    raw_rd = load_raw_resource_description(unet2d_nuclei_broad_collection)
-    data = serialize_raw_resource_description_to_dict(raw_rd, convert_absolute_paths=False)
-
-    data["collection"][0]["name"] = 1  # invalidate data
-    assert validate(data, update_format=True, update_format_inner=False)["error"]
-    assert validate(data, update_format=False, update_format_inner=False)["error"]
data["format_version"] = v_future # assume it is valid in a future format version + + summary = validate_format( + data, + context=ValidationContext(root=EXAMPLE_COM, perform_io_checks=False), + ) + assert summary.status == "passed", summary.errors + + # expect warning about treating future format version as latest + ws = summary.warnings + assert len(ws) >= 1, ws + assert ws[0].msg.startswith("future format_version '0.9999.0' treated as ") + + +def test_no_forward_compatibility(unet2d_data: BioimageioYamlContent): + data = dict(unet2d_data) + data["authors"] = 42 # make sure rdf is invalid + data["format_version"] = "0.9999.0" # assume it is valid in a future format version + + summary = validate_format( + data, + context=ValidationContext(root=EXAMPLE_COM, perform_io_checks=False), + ) + assert summary.status == "failed", summary + + assert len(summary.errors) == 1, summary.errors + assert summary.errors[0].loc == ("authors",), summary.errors[0].loc + + # expect warning about treating future format version as latest + ws = summary.warnings + assert len(ws) >= 1, ws + assert ws[0].msg.startswith("future format_version '0.9999.0' treated as ") diff --git a/tests/test_dump_spec.py b/tests/test_dump_spec.py deleted file mode 100644 index bb3ec5e3b..000000000 --- a/tests/test_dump_spec.py +++ /dev/null @@ -1,112 +0,0 @@ -import platform -from pathlib import Path - -import pytest - -from bioimageio.spec.dataset.raw_nodes import Dataset -from bioimageio.spec.shared import yaml - - -def test_spec_round_trip(unet2d_nuclei_broad_any_minor): - from bioimageio.spec import load_raw_resource_description, serialize_raw_resource_description_to_dict - - assert yaml is not None - expected = yaml.load(unet2d_nuclei_broad_any_minor) - # monkeypatch: yaml.load already converts timestamp to datetime.datetime, while we serialize it to ISO 8601 - if "timestamp" in expected: - expected["timestamp"] = expected["timestamp"].isoformat() - - # round-trip - raw_model = load_raw_resource_description(unet2d_nuclei_broad_any_minor) - serialized = serialize_raw_resource_description_to_dict(raw_model, convert_absolute_paths=True) - - assert isinstance(serialized, dict) - assert expected == serialized - - # as we converted absolute paths back to relative, we need to set the root path - serialized["root_path"] = unet2d_nuclei_broad_any_minor.parent - raw_model_from_serialized = load_raw_resource_description(serialized) - assert raw_model_from_serialized == raw_model - - -def test_spec_round_trip_w_attachments(unet2d_nuclei_broad_latest): - from bioimageio.spec import load_raw_resource_description, serialize_raw_resource_description_to_dict - - assert yaml is not None - data = yaml.load(unet2d_nuclei_broad_latest) - data["root_path"] = unet2d_nuclei_broad_latest.parent - - # monkeypatch: yaml.load already converts timestamp to datetime.datetime, while we serialize it to ISO 8601 - if "timestamp" in data: - data["timestamp"] = data["timestamp"].isoformat() - - data["attachments"] = {"files": ["some_file.ext"], "another_unknown_attachment": ["sub", "whatever", {"weird": 10}]} - - raw_model = load_raw_resource_description(data) - - serialized = serialize_raw_resource_description_to_dict(raw_model, convert_absolute_paths=True) - assert isinstance(serialized, dict) - serialized["root_path"] = unet2d_nuclei_broad_latest.parent - assert serialized == data - - raw_model_from_serialized = load_raw_resource_description(serialized) - assert raw_model_from_serialized == raw_model - - -def test_dataset_rdf_round_trip(dataset_rdf): - from 
diff --git a/tests/test_dump_spec.py b/tests/test_dump_spec.py
deleted file mode 100644
index bb3ec5e3b..000000000
--- a/tests/test_dump_spec.py
+++ /dev/null
@@ -1,112 +0,0 @@
-import platform
-from pathlib import Path
-
-import pytest
-
-from bioimageio.spec.dataset.raw_nodes import Dataset
-from bioimageio.spec.shared import yaml
-
-
-def test_spec_round_trip(unet2d_nuclei_broad_any_minor):
-    from bioimageio.spec import load_raw_resource_description, serialize_raw_resource_description_to_dict
-
-    assert yaml is not None
-    expected = yaml.load(unet2d_nuclei_broad_any_minor)
-    # monkeypatch: yaml.load already converts timestamp to datetime.datetime, while we serialize it to ISO 8601
-    if "timestamp" in expected:
-        expected["timestamp"] = expected["timestamp"].isoformat()
-
-    # round-trip
-    raw_model = load_raw_resource_description(unet2d_nuclei_broad_any_minor)
-    serialized = serialize_raw_resource_description_to_dict(raw_model, convert_absolute_paths=True)
-
-    assert isinstance(serialized, dict)
-    assert expected == serialized
-
-    # as we converted absolute paths back to relative, we need to set the root path
-    serialized["root_path"] = unet2d_nuclei_broad_any_minor.parent
-    raw_model_from_serialized = load_raw_resource_description(serialized)
-    assert raw_model_from_serialized == raw_model
-
-
-def test_spec_round_trip_w_attachments(unet2d_nuclei_broad_latest):
-    from bioimageio.spec import load_raw_resource_description, serialize_raw_resource_description_to_dict
-
-    assert yaml is not None
-    data = yaml.load(unet2d_nuclei_broad_latest)
-    data["root_path"] = unet2d_nuclei_broad_latest.parent
-
-    # monkeypatch: yaml.load already converts timestamp to datetime.datetime, while we serialize it to ISO 8601
-    if "timestamp" in data:
-        data["timestamp"] = data["timestamp"].isoformat()
-
-    data["attachments"] = {"files": ["some_file.ext"], "another_unknown_attachment": ["sub", "whatever", {"weird": 10}]}
-
-    raw_model = load_raw_resource_description(data)
-
-    serialized = serialize_raw_resource_description_to_dict(raw_model, convert_absolute_paths=True)
-    assert isinstance(serialized, dict)
-    serialized["root_path"] = unet2d_nuclei_broad_latest.parent
-    assert serialized == data
-
-    raw_model_from_serialized = load_raw_resource_description(serialized)
-    assert raw_model_from_serialized == raw_model
-
-
-def test_dataset_rdf_round_trip(dataset_rdf):
-    from bioimageio.spec import load_raw_resource_description, serialize_raw_resource_description_to_dict
-
-    assert yaml is not None
-    data = yaml.load(dataset_rdf)
-    raw = load_raw_resource_description(data)
-    serialized = serialize_raw_resource_description_to_dict(raw)
-    assert data == serialized
-
-
-# todo: fix test on windows
-@pytest.mark.skipif(
-    platform.system() == "Windows", reason="OSError: [WinError 1314] A required privilege is not held by the client"
-)
-def test_serialize_with_link_in_path(dataset_rdf, tmp_path: Path):
-    from bioimageio.spec import load_raw_resource_description, serialize_raw_resource_description_to_dict
-
-    data = load_raw_resource_description(dataset_rdf)
-    assert isinstance(data, Dataset)
-    true_root = tmp_path / "root"
-    true_root.mkdir()
-    linked_root = tmp_path / "link"
-    linked_root.symlink_to(true_root, target_is_directory=True)
-
-    doc_path = linked_root / "docs.md"
-    doc_path.write_text("# Documentation")
-
-    data.root_path = true_root
-    data.documentation = doc_path  # doc path only in root through link
-
-    serialized = serialize_raw_resource_description_to_dict(data, convert_absolute_paths=True)
-    assert serialized["documentation"] == "docs.md"
-
-
-# todo: fix test on windows
-@pytest.mark.skipif(
-    platform.system() == "Windows", reason="OSError: [WinError 1314] A required privilege is not held by the client"
-)
-def test_serialize_with_link_in_root(dataset_rdf, tmp_path: Path):
-    from bioimageio.spec import load_raw_resource_description, serialize_raw_resource_description_to_dict
-
-    data = load_raw_resource_description(dataset_rdf)
-    assert isinstance(data, Dataset)
-
-    true_root = tmp_path / "root"
-    true_root.mkdir()
-    linked_root = tmp_path / "link"
-    linked_root.symlink_to(true_root, target_is_directory=True)
-
-    doc_path = true_root / "docs.md"
-    doc_path.write_text("# Documentation")
-
-    data.root_path = linked_root  # root path is symlink to true root
-    data.documentation = doc_path
-
-    serialized = serialize_raw_resource_description_to_dict(data, convert_absolute_paths=True)
-    assert serialized["documentation"] == "docs.md"
"models/unet2d_multi_tensor/v0_4.bioimageio.yaml": {"dependencies", "weights"}, + "models/unet2d_nuclei_broad/v0_4_0.bioimageio.yaml": { + "dependencies", + "weights", + "version", + }, + "models/unet2d_nuclei_broad/v0_4_9.bioimageio.yaml": {"version"}, + "models/upsample_test_model/v0_4.bioimageio.yaml": {"dependencies", "weights"}, +} + + +@pytest.mark.parametrize("format_version", [DISCOVER, LATEST]) +@pytest.mark.parametrize("descr_path,key", list(yield_valid_descr_paths())) +def test_example_descr_paths( + descr_path: Path, + key: str, + format_version: FormatVersionPlaceholder, + bioimageio_json_schema: Mapping[Any, Any], +): + check_bioimageio_yaml( + descr_path, + root=descr_path.parent, + as_latest=format_version == LATEST, + exclude_fields_from_roundtrip=EXCLUDE_FIELDS_FROM_ROUNDTRIP.get(key, set()), + bioimageio_json_schema=bioimageio_json_schema, + ) + + +@pytest.mark.parametrize("descr_path,key", list(yield_invalid_descr_paths())) +def test_invalid_example_descr_paths( + descr_path: Path, key: str, bioimageio_json_schema: Mapping[Any, Any] +): + check_bioimageio_yaml( + descr_path, + root=descr_path.parent, + as_latest=False, + is_invalid=True, + bioimageio_json_schema=bioimageio_json_schema, + ) diff --git a/tests/test_fields.py b/tests/test_fields.py deleted file mode 100644 index a663d145f..000000000 --- a/tests/test_fields.py +++ /dev/null @@ -1,154 +0,0 @@ -import pathlib -from datetime import datetime, timezone -from typing import Any, Dict - -import numpy -import pytest -from marshmallow import Schema, ValidationError -from numpy.testing import assert_equal -from pytest import raises - -from bioimageio.spec.model import schema -from bioimageio.spec.shared import fields, raw_nodes - - -class TestArray: - def test_unequal_nesting_depth(self): - with raises(ValidationError): - fields.Array(fields.Integer(strict=True)).deserialize([[1, 2], 3]) - - def test_uneuqal_sublen(self): - with raises(ValidationError): - fields.Array(fields.Integer(strict=True)).deserialize([[1, 2], [3]]) - - def test_scalar(self): - data = 1 - expected = data - actual = fields.Array(fields.Integer(strict=True)).deserialize(data) - assert_equal(actual, expected) - - def test_invalid_scalar(self): - data = "invalid" - with raises(ValidationError): - fields.Array(fields.Integer(strict=True)).deserialize(data) - - def test_2d(self): - data = [[1, 2], [3, 4]] - expected = numpy.array(data, dtype=int) - actual = fields.Array(fields.Integer(strict=True)).deserialize(data) - assert_equal(actual, expected) - - def test_wrong_dtype(self): - data = [[1, 2], [3, 4.5]] - with raises(ValidationError): - fields.Array(fields.Integer(strict=True)).deserialize(data) - - -class TestDateTime: - def test_datetime_from_str(self): - timestamp = "2019-12-11T12:22:32+00:00" - expected = datetime.fromisoformat(timestamp) - actual = fields.DateTime().deserialize(timestamp) - assert expected == actual - - def test_datetime_from_datetime(self): - expected = datetime.now() - assert expected == fields.DateTime().deserialize(expected) - - def test_datetime_iso_with_zulu_offset(self): - timestamp_non_zulu = "2019-12-11T12:22:32+00:00" - timestamp_zulu = "2019-12-11T12:22:32Z" - expected = datetime(2019, 12, 11, 12, 22, 32, tzinfo=timezone.utc) - actual1 = fields.DateTime().deserialize(timestamp_non_zulu) - actual2 = fields.DateTime().deserialize(timestamp_zulu) - assert expected == actual1 - assert expected == actual2 - - -class TestShape: - def test_explicit_input_shape(self): - data = [1, 2, 3] - expected = data - actual = 
diff --git a/tests/test_fields.py b/tests/test_fields.py
deleted file mode 100644
index a663d145f..000000000
--- a/tests/test_fields.py
+++ /dev/null
@@ -1,154 +0,0 @@
-import pathlib
-from datetime import datetime, timezone
-from typing import Any, Dict
-
-import numpy
-import pytest
-from marshmallow import Schema, ValidationError
-from numpy.testing import assert_equal
-from pytest import raises
-
-from bioimageio.spec.model import schema
-from bioimageio.spec.shared import fields, raw_nodes
-
-
-class TestArray:
-    def test_unequal_nesting_depth(self):
-        with raises(ValidationError):
-            fields.Array(fields.Integer(strict=True)).deserialize([[1, 2], 3])
-
-    def test_uneuqal_sublen(self):
-        with raises(ValidationError):
-            fields.Array(fields.Integer(strict=True)).deserialize([[1, 2], [3]])
-
-    def test_scalar(self):
-        data = 1
-        expected = data
-        actual = fields.Array(fields.Integer(strict=True)).deserialize(data)
-        assert_equal(actual, expected)
-
-    def test_invalid_scalar(self):
-        data = "invalid"
-        with raises(ValidationError):
-            fields.Array(fields.Integer(strict=True)).deserialize(data)
-
-    def test_2d(self):
-        data = [[1, 2], [3, 4]]
-        expected = numpy.array(data, dtype=int)
-        actual = fields.Array(fields.Integer(strict=True)).deserialize(data)
-        assert_equal(actual, expected)
-
-    def test_wrong_dtype(self):
-        data = [[1, 2], [3, 4.5]]
-        with raises(ValidationError):
-            fields.Array(fields.Integer(strict=True)).deserialize(data)
-
-
-class TestDateTime:
-    def test_datetime_from_str(self):
-        timestamp = "2019-12-11T12:22:32+00:00"
-        expected = datetime.fromisoformat(timestamp)
-        actual = fields.DateTime().deserialize(timestamp)
-        assert expected == actual
-
-    def test_datetime_from_datetime(self):
-        expected = datetime.now()
-        assert expected == fields.DateTime().deserialize(expected)
-
-    def test_datetime_iso_with_zulu_offset(self):
-        timestamp_non_zulu = "2019-12-11T12:22:32+00:00"
-        timestamp_zulu = "2019-12-11T12:22:32Z"
-        expected = datetime(2019, 12, 11, 12, 22, 32, tzinfo=timezone.utc)
-        actual1 = fields.DateTime().deserialize(timestamp_non_zulu)
-        actual2 = fields.DateTime().deserialize(timestamp_zulu)
-        assert expected == actual1
-        assert expected == actual2
-
-
-class TestShape:
-    def test_explicit_input_shape(self):
-        data = [1, 2, 3]
-        expected = data
-        actual = fields.ExplicitShape().deserialize(data)
-        assert expected == actual
-
-    def test_explicit_output_shape(self):
-        data = [1, 2, 3]
-        expected = data
-        actual = schema.OutputTensor().fields["shape"].deserialize(data)
-        assert expected == actual
-
-    def test_min_step_input_shape(self):
-        data = {"min": [1, 2, 3], "step": [0, 1, 3]}
-        expected = raw_nodes.ParametrizedInputShape(**data)
-        actual = fields.Union(
-            [fields.ExplicitShape(), fields.Nested(schema.ParametrizedInputShape())], required=True
-        ).deserialize(data)
-        assert actual == expected
-
-    def test_output_shape(self):
-        data: Dict[str, Any] = {"reference_tensor": "in1", "scale": [1, 2, 3], "offset": [0, 1, 3]}
-        expected = raw_nodes.ImplicitOutputShape(**data)
-        actual = fields.Union(
-            [fields.ExplicitShape(), fields.Nested(schema.ImplicitOutputShape())], required=True
-        ).deserialize(data)
-        assert actual == expected
-
-
-class TestURI:
-    def test_missing_scheme_is_invalid(self):
-        # local relative paths used to be valid "uris"
-        relative_path = "relative_file/path.txt"
-
-        with pytest.raises(ValidationError):
-            fields.URI().deserialize(relative_path)
-
-
-class TestUnion:
-    def test_error_messages(self):
-        union = fields.Union([fields.String(), fields.Number()])
-        try:
-            union.deserialize([1])
-        except ValidationError as e:
-            assert isinstance(e, ValidationError)
-            assert len(e.messages) == 3, e.messages
-
-    def test_union_with_absolute_path(self):
-        class DummySchema(Schema):
-            source = fields.Union([fields.URI(), fields.RelativeLocalPath()])  # we use this case in a lot of places
-
-        s = DummySchema()
-        data = dict(source="C:/repos")
-
-        with pytest.raises(ValidationError):
-            s.load(data)
-
-
-class TestRelativeLocalPath:
-    def test_simple_file_name(self):
-        fname = "unet2d.py"
-        expected = pathlib.Path(fname)
-        actual = fields.RelativeLocalPath().deserialize(fname)
-        assert isinstance(actual, pathlib.Path)
-        assert actual == expected
-
-
-class TestDependencies:
-    class MinimalSchema(Schema):
-        dep = fields.Dependencies()
-
-    def test_url_roundtrip(self):
-        manager = "conda"
-        file = "https://raw.githubusercontent.com/bioimage-io/spec-bioimage-io/main/example_specs/models/unet2d_nuclei_broad/environment.yaml"
-        dep_input = dict(dep=f"{manager}:{file}")
-        s = self.MinimalSchema()
-
-        node = s.load(dep_input)
-        dep = node["dep"]
-        assert isinstance(dep, raw_nodes.Dependencies)
-        assert dep.manager == manager
-        assert isinstance(dep.file, raw_nodes.URI)
-        assert str(dep.file) == file
-
-        dep_serialized = s.dump(node)
-        assert dep_serialized == dep_input
diff --git a/tests/test_format_version_conversion.py b/tests/test_format_version_conversion.py
deleted file mode 100644
index 3d50a470b..000000000
--- a/tests/test_format_version_conversion.py
+++ /dev/null
@@ -1,71 +0,0 @@
-from dataclasses import asdict
-from typing import Tuple
-
-import pytest
-from packaging.version import Version
-
-from bioimageio.spec.model import schema
-from bioimageio.spec.shared import yaml
-
-
-def test_model_format_version_conversion(unet2d_nuclei_broad_before_latest, unet2d_nuclei_broad_latest):
-    from bioimageio.spec.model.converters import maybe_convert
-
-    assert yaml is not None
-    old_model_data = yaml.load(unet2d_nuclei_broad_before_latest)
-    model_data = yaml.load(unet2d_nuclei_broad_latest)
-
-    expected = asdict(schema.Model().load(model_data))
-    converted_data = maybe_convert(old_model_data)
-    actual = asdict(schema.Model().load(converted_data))
-
-    if old_model_data["format_version"] == "0.1.0":
-        # expect converted description
-        for ipt in expected["inputs"]:
-            ipt["description"] = ipt["name"]
-
-        for out in expected["outputs"]:
-            out["description"] = out["name"]
-
-    # ignore new optional fields
-    if tuple(map(int, old_model_data["format_version"].split("."))) < (0, 4, 0):
-        expected.pop("maintainers")
-        actual.pop("maintainers")
-    if tuple(map(int, old_model_data["format_version"].split("."))) < (0, 4, 4):
-        expected.pop("download_url")
-        actual.pop("download_url")
-        expected.pop("training_data")
-        actual.pop("training_data")
-
-    for key, item in expected.items():
-        assert key in actual, key
-        assert actual[key] == item, key
-
-    for key, item in actual.items():
-        assert key in expected
-        assert item == expected[key]
-
-
-# todo: break forward compatibility on major version difference?
-@pytest.mark.parametrize("v_diff", [(0, 0, 1), (0, 1, 0), (1, 0, 0), (0, 1, 1)])
-def test_forward_compatible(v_diff: Tuple[int, int, int], unet2d_nuclei_broad_latest):
-    from bioimageio.spec import load_raw_resource_description
-    from bioimageio.spec.model import format_version
-
-    fv_key = "format_version"
-
-    assert yaml is not None
-    model_data = yaml.load(unet2d_nuclei_broad_latest)
-
-    v_latest: Version = Version(format_version)
-    v_future: str = ".".join(
-        [str(latest + diff) for latest, diff in zip([v_latest.major, v_latest.minor, v_latest.micro], v_diff)]
-    )
-
-    future_model_data = dict(model_data)
-    future_model_data[fv_key] = v_future
-
-    rd = load_raw_resource_description(future_model_data)
-    assert rd.format_version == format_version
-    assert hasattr(rd, "config")
-    assert rd.config["bioimageio"]["original_format_version"] == v_future
diff --git a/tests/test_generic/__init__.py b/tests/test_generic/__init__.py
new file mode 100644
index 000000000..e69de29bb
orcid="0000-0001-2345-6789", + ), + True, + ), + (Maintainer, dict(name="without_gh"), False), + (Maintainer, dict(github_user=5.5), False), + (CiteEntry, dict(text="lala", url=EXAMPLE_COM), True), + (CiteEntry, dict(text="lala", doi="10.1234fakedoi"), True), + ( + CiteEntry, + dict( + text="Title", + doi="10.1109/5.771073", + url="https://ieeexplore.ieee.org/document/771073", + ), + True, + ), + (CiteEntry, dict(text="lala"), False), + (CiteEntry, dict(url=EXAMPLE_COM), False), + ( + GenericDescr, + dict( + authors=[{"name": "Me"}], + description="the description", + format_version=GenericDescr.implemented_format_version, + license="BSD-2-Clause-FreeBSD", + name="my name", + type="my_type", + unknown_extra_field="present", + version="0.1.0", + ), + True, + ), + ( + GenericDescr, + dict( + format_version=GenericDescr.implemented_format_version, + name="your name", + description="my description", + attachments={"something": None}, + type="my_type", + version="0.1.0", + ), + True, + ), + (GenericDescr, dict(text="lala"), False), + (GenericDescr, dict(url=EXAMPLE_COM), False), + ( + GenericDescr, + dict( + authors=[{"name": "Me"}], + description="the description", + format_version=GenericDescr.implemented_format_version, + license="BSD-2-Clause-FreeBSD", + name="my name", + type="my_type", + unknown_extra_field="present", + version="0.1.0", + ), + True, + ), + ( + GenericDescr, + dict( + format_version=GenericDescr.implemented_format_version, + name="your name", + description="my description", + attachments={"something": 42}, + type="my_type", + version="0.1.0", + ), + True, + ), + ( + GenericDescr, + dict( + format_version=GenericDescr.implemented_format_version, + version="0.1.0", + type="my_type", + name="their name", + ), + False, + ), + ( + GenericDescr, + dict( + format_version=GenericDescr.implemented_format_version, + version="0.1.0", + type="my_type", + name="its name", + attachments={"files": ["missing"], "something": 42}, + ), + False, + ), + ], +) +def test_node(node_class: Type[Node], kwargs: Dict[str, Any], is_valid: bool): + check_node(node_class, kwargs, is_invalid=not is_valid) + + +def test_deprecated_license_in_generic(): + check_node( + GenericDescr, + dict( + format_version=GenericDescr.implemented_format_version, + name="my name", + description="my description", + authors=[{"name": "Me"}], + type="my_type", + version="0.1.0", + license="BSD-2-Clause-FreeBSD", + cite=[dict(text="lala", url=EXAMPLE_COM)], + ), + context=ValidationContext(warning_level=WARNING), + is_invalid=True, + ) diff --git a/tests/test_generic/test_v0_3.py b/tests/test_generic/test_v0_3.py new file mode 100644 index 000000000..7f34db34a --- /dev/null +++ b/tests/test_generic/test_v0_3.py @@ -0,0 +1,222 @@ +from __future__ import annotations + +from pathlib import Path, PurePath +from typing import Any, Dict, Union + +import pytest +from pydantic import ( + FilePath, + TypeAdapter, + ValidationError, +) +from typing_extensions import Annotated, assert_never + +from bioimageio.spec._internal.io import ( + RelativeFilePath, + WithSuffix, + validate_suffix, +) +from bioimageio.spec._internal.io_basics import AbsoluteFilePath +from bioimageio.spec._internal.url import HttpUrl +from bioimageio.spec._internal.validation_context import ValidationContext +from bioimageio.spec._internal.warning_levels import WARNING +from bioimageio.spec.generic.v0_3 import DocumentationSource, GenericDescr +from tests.conftest import UNET2D_ROOT +from tests.utils import check_node + +EXAMPLE_COM = "https://example.com/" 
diff --git a/tests/test_generic/test_v0_3.py b/tests/test_generic/test_v0_3.py
new file mode 100644
index 000000000..7f34db34a
--- /dev/null
+++ b/tests/test_generic/test_v0_3.py
@@ -0,0 +1,222 @@
+from __future__ import annotations
+
+from pathlib import Path, PurePath
+from typing import Any, Dict, Union
+
+import pytest
+from pydantic import (
+    FilePath,
+    TypeAdapter,
+    ValidationError,
+)
+from typing_extensions import Annotated, assert_never
+
+from bioimageio.spec._internal.io import (
+    RelativeFilePath,
+    WithSuffix,
+    validate_suffix,
+)
+from bioimageio.spec._internal.io_basics import AbsoluteFilePath
+from bioimageio.spec._internal.url import HttpUrl
+from bioimageio.spec._internal.validation_context import ValidationContext
+from bioimageio.spec._internal.warning_levels import WARNING
+from bioimageio.spec.generic.v0_3 import DocumentationSource, GenericDescr
+from tests.conftest import UNET2D_ROOT
+from tests.utils import check_node
+
+EXAMPLE_COM = "https://example.com/"
+EXAMPLE_COM_FILE = "https://example.com/file"
+
+
+@pytest.mark.parametrize(
+    "kwargs",
+    [
+        dict(
+            authors=[{"name": "Me"}],
+            cite=[dict(text="lala", url=EXAMPLE_COM)],
+            description="the description",
+            format_version=GenericDescr.implemented_format_version,
+            license="BSD-2-Clause-FreeBSD",
+            name="my name",
+            type="my_type",
+            unknown_extra_field="present",
+            version="1",
+        ),
+        dict(
+            attachments={"files": [EXAMPLE_COM_FILE], "something": 42},
+            authors=[{"name": "Me"}],
+            cite=[dict(text="lala", url=EXAMPLE_COM)],
+            description="my description",
+            format_version=GenericDescr.implemented_format_version,
+            license="BSD-2-Clause-FreeBSD",
+            name="your name",
+            type="my_type",
+            version="2",
+        ),
+    ],
+)
+def test_generic_valid(kwargs: Dict[str, Any]):
+    check_node(GenericDescr, kwargs, context=ValidationContext(perform_io_checks=False))
+
+
+@pytest.mark.parametrize(
+    "kwargs,context",
+    [
+        pytest.param(
+            dict(
+                format_version=GenericDescr.implemented_format_version,
+                name="my name",
+                description="my description",
+                authors=[{"name": "Me"}],
+                type="my_type",
+                version="0.1.0",
+                license="BSD-2-Clause-FreeBSD",
+                cite=[dict(text="lala", url=EXAMPLE_COM)],
+            ),
+            ValidationContext(warning_level=WARNING, perform_io_checks=False),
+            id="deprecated license",
+        ),
+        (
+            dict(
+                format_version=GenericDescr.implemented_format_version,
+                version="0.1.0",
+                type="my_type",
+                name="their name",
+            ),
+            ValidationContext(perform_io_checks=False),
+        ),
+        (
+            dict(
+                format_version=GenericDescr.implemented_format_version,
+                version="0.1.0",
+                type="my_type",
+                name="its name",
+                attachments={"files": [Path(__file__), "missing"], "something": 42},
+            ),
+            ValidationContext(perform_io_checks=False),
+        ),
+    ],
+)
+def test_generic_invalid(kwargs: Dict[str, Any], context: ValidationContext):
+    check_node(GenericDescr, kwargs, context=context, is_invalid=True)
+
+
+# @pytest.mark.parametrize("src", [UNET2D_ROOT / "README.md", text_md_url])
+def test_documentation_source():
+    from bioimageio.spec.generic.v0_3 import DocumentationSource
+
+    doc_src = "https://example.com/away.md"
+    adapter = TypeAdapter(DocumentationSource)
+    with ValidationContext(perform_io_checks=False):
+        valid = adapter.validate_python(doc_src)
+
+    assert str(valid) == doc_src
+
+
+def test_documentation_source_abs_path():
+    from bioimageio.spec.generic.v0_3 import DocumentationSource
+
+    doc_src = UNET2D_ROOT / "README.md"
+    assert doc_src.exists(), doc_src
+    adapter = TypeAdapter(DocumentationSource)
+
+    valid = adapter.validate_python(doc_src)
+    assert str(valid) == str(doc_src)
+
+    data = adapter.dump_python(valid, mode="python")
+    assert str(data) == str(doc_src)
+    data = adapter.dump_python(valid, mode="json")
+    assert str(data) == str(doc_src)
+
+    doc_src = UNET2D_ROOT / "does_not_exist.md"
+    assert not doc_src.exists(), doc_src
+    with pytest.raises(ValidationError):
+        _ = adapter.validate_python(doc_src)
+
+
+with ValidationContext(perform_io_checks=False):
+    text_md_url = HttpUrl("https://example.com/text.md")
+
+
+def validate_md_suffix(value: Union[AbsoluteFilePath, RelativeFilePath, HttpUrl]):
+    return validate_suffix(value, ".md", case_sensitive=True)
+
+
+@pytest.mark.parametrize(
+    "src,adapter",
+    [
+        (UNET2D_ROOT / "README.md", a)
+        for a in [
+            TypeAdapter(
+                Annotated[FilePath, WithSuffix(".md", case_sensitive=True)]
+            ),  # pyright: ignore[reportCallIssue]
+            TypeAdapter(
+                Annotated[Path, WithSuffix(".md", case_sensitive=True)]
+            ),  # pyright: ignore[reportCallIssue]
+            TypeAdapter(
+                Annotated[PurePath, WithSuffix(".md", case_sensitive=True)]
+            ),  # pyright: ignore[reportCallIssue]
+            TypeAdapter(
+                Annotated[
+                    Union[PurePath, HttpUrl], WithSuffix(".md", case_sensitive=True)
+                ]
+            ),
+            TypeAdapter(
+                Annotated[
+                    Union[AbsoluteFilePath, RelativeFilePath, HttpUrl],
+                    WithSuffix(".md", case_sensitive=True),
+                ]
+            ),
+            TypeAdapter(DocumentationSource),
+            TypeAdapter(
+                Annotated[DocumentationSource, WithSuffix(".md", case_sensitive=True)]
+            ),
+        ]
+    ]
+    + [
+        (text_md_url, a)
+        for a in [
+            TypeAdapter(
+                Annotated[HttpUrl, WithSuffix(".md", case_sensitive=True)]
+            ),  # pyright: ignore[reportCallIssue]
+            TypeAdapter(
+                Annotated[
+                    Union[PurePath, HttpUrl], WithSuffix(".md", case_sensitive=True)
+                ]
+            ),
+            TypeAdapter(
+                Annotated[
+                    Union[AbsoluteFilePath, RelativeFilePath, HttpUrl],
+                    WithSuffix(".md", case_sensitive=True),
+                ]
+            ),
+            TypeAdapter(DocumentationSource),
+            TypeAdapter(
+                Annotated[DocumentationSource, WithSuffix(".md", case_sensitive=True)]
+            ),
+        ]
+    ],
+)
+def test_with_suffix(src: Union[Path, HttpUrl], adapter: TypeAdapter[Any]):
+    with ValidationContext(perform_io_checks=False):
+        valid = adapter.validate_python(src)
+
+    assert isinstance(valid, type(src))
+
+    # dump_python(..., mode="python") returns RootModel's as is for some TypeAdapters based on Unions
+    # see https://github.com/pydantic/pydantic/issues/8963
+    # obj = adapter.dump_python(valid, mode="python")
+    # if isinstance(src, Path):
+    #     assert obj == src
+    # elif isinstance(src, HttpUrl):
+    #     assert obj == str(src)
+    # else:
+    #     assert_never(src)
+
+    json_obj = adapter.dump_python(valid, mode="json")
+    if isinstance(src, Path):
+        assert json_obj == str(src)
+    elif isinstance(src, HttpUrl):
+        assert json_obj == str(src)
+    else:
+        assert_never(src)
diff --git a/tests/test_internal/__init__.py b/tests/test_internal/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/tests/test_internal/test_base_nodes.py b/tests/test_internal/test_base_nodes.py
new file mode 100644
index 000000000..d11a2986f
--- /dev/null
+++ b/tests/test_internal/test_base_nodes.py
@@ -0,0 +1,21 @@
+from typing import Any
+
+
+def test_converter_with_arg():
+    from bioimageio.spec._internal.common_nodes import Converter, Node
+
+    class A(Node):
+        a: int
+
+    class B(Node):
+        b: str
+
+    class AtoB(Converter[A, B, str]):
+        def _convert(
+            self, src: A, tgt: "type[B] | type[dict[str, Any]]", /, prefix: str
+        ) -> "B | dict[str, Any]":
+            return tgt(b=prefix + str(src.a))
+
+    converter = AtoB(A, B)
+
+    _ = converter.convert(A(a=5), "prefix")
diff --git a/tests/test_internal/test_constants.py b/tests/test_internal/test_constants.py
new file mode 100644
index 000000000..6f705a4c9
--- /dev/null
+++ b/tests/test_internal/test_constants.py
@@ -0,0 +1,38 @@
+import logging
+import re
+
+import pytest
+
+from bioimageio.spec._internal.warning_levels import ALERT, INFO, WARNING
+
+
+@pytest.mark.parametrize("unit", ["lx·s", "kg/m^2·s^-2"])
+def test_si_unit(unit: str):
+    from bioimageio.spec._internal.constants import SI_UNIT_REGEX
+
+    assert re.fullmatch(SI_UNIT_REGEX, unit)
+
+
+@pytest.mark.parametrize("unit", ["lxs", " kg", "kg/m^-2"])
+def test_invalid_si_unit(unit: str):
+    from bioimageio.spec._internal.constants import SI_UNIT_REGEX
+
+    assert not re.fullmatch(SI_UNIT_REGEX, unit)
+
+
+def test_warning_levels():
+    """test that our warning levels nicely match python's logging module's levels"""
+    assert ALERT < logging.ERROR
+    assert ALERT > logging.WARNING
+    assert WARNING == logging.WARNING
+    assert INFO == logging.INFO
+
+
+def test_known_gh_users_are_lowercase():
+    from bioimageio.spec._internal.constants import (
+        KNOWN_GH_USERS,
+        KNOWN_INVALID_GH_USERS,
+    )
+
+    assert KNOWN_GH_USERS == {user.lower() for user in KNOWN_GH_USERS}
+    assert KNOWN_INVALID_GH_USERS == {user.lower() for user in KNOWN_INVALID_GH_USERS}
diff --git a/tests/test_internal/test_file_source.py b/tests/test_internal/test_file_source.py
new file mode 100644
index 000000000..f82cb2b81
--- /dev/null
+++ b/tests/test_internal/test_file_source.py
@@ -0,0 +1,27 @@
+import pytest
+
+from bioimageio.spec._internal.io_basics import FileName
+
+
+@pytest.mark.parametrize(
+    "name",
+    [
+        "bioimageio.yaml",
+        "rdf.yaml",
+        "model.yaml",
+        "smth.bioimageio.yaml",
+        "smth.rdf.yaml",
+        "smth.model.yaml",
+    ],
+)
+def test_is_valid_rdf_name(name: FileName):
+    from bioimageio.spec._internal.io import is_valid_rdf_name
+
+    assert is_valid_rdf_name(name), name
+
+
+@pytest.mark.parametrize("name", ["bioimageio.yml", "RDF.yaml", "smth.yaml"])
+def test_is_invalid_rdf_name(name: FileName):
+    from bioimageio.spec._internal.io import is_valid_rdf_name
+
+    assert not is_valid_rdf_name(name), name
diff --git a/tests/test_internal/test_license_id.py b/tests/test_internal/test_license_id.py
new file mode 100644
index 000000000..57e1721b3
--- /dev/null
+++ b/tests/test_internal/test_license_id.py
@@ -0,0 +1,22 @@
+import pytest
+from pydantic import ValidationError
+
+
+def test_license_id():
+    from bioimageio.spec._internal.license_id import LicenseId
+
+    _ = LicenseId("MIT")
+
+    with pytest.raises(ValidationError):
+        _ = LicenseId("not_a_valid_license_id")  # pyright: ignore[reportArgumentType]
+
+
+def test_deprecated_license_id():
+    from bioimageio.spec._internal.license_id import DeprecatedLicenseId
+
+    _ = DeprecatedLicenseId("AGPL-1.0")
+
+    with pytest.raises(ValidationError):
+        _ = DeprecatedLicenseId(
+            "not_a_valid_license_id"  # pyright: ignore[reportArgumentType]
+        )
{}, True),
+        (NESTED_DICT_DUMMY_INPUT, {WARNING_LEVEL_CONTEXT_KEY: ERROR}, True),
+        (NESTED_DICT_DUMMY_INPUT, {WARNING_LEVEL_CONTEXT_KEY: ALERT}, True),
+        (NESTED_NODE_DUMMY_INPUT, {}, True),
+        (NESTED_NODE_DUMMY_INPUT, {WARNING_LEVEL_CONTEXT_KEY: ERROR}, True),
+        (NESTED_NODE_DUMMY_INPUT, {WARNING_LEVEL_CONTEXT_KEY: ALERT}, True),
+        (NESTED_DICT_DUMMY_INPUT, {WARNING_LEVEL_CONTEXT_KEY: WARNING}, False),
+        (NESTED_DICT_DUMMY_INPUT, {WARNING_LEVEL_CONTEXT_KEY: INFO}, False),
+        (
+            NESTED_NODE_DUMMY_INPUT,
+            {WARNING_LEVEL_CONTEXT_KEY: WARNING},
+            True,  # no reevaluation of node instance
+        ),
+        (
+            NESTED_NODE_DUMMY_INPUT,
+            {WARNING_LEVEL_CONTEXT_KEY: INFO},
+            True,  # no reevaluation of node instance
+        ),
+    ],
+)
+def test_warn_nested(kwargs: Dict[str, Any], context: ValidationContext, valid: bool):
+    check_node(NestedDummyNode, kwargs, context=context, is_invalid=not valid)
diff --git a/tests/test_internal/test_types.py b/tests/test_internal/test_types.py
new file mode 100644
index 000000000..e5aebe962
--- /dev/null
+++ b/tests/test_internal/test_types.py
@@ -0,0 +1,201 @@
+import typing
+from datetime import datetime
+from pathlib import Path
+
+import pytest
+from dateutil.parser import isoparse
+from pydantic import PlainSerializer, TypeAdapter
+from typing_extensions import Annotated
+
+from bioimageio.spec._internal import types
+from bioimageio.spec._internal.io import RelativeFilePath
+from bioimageio.spec._internal.types import Datetime, SiUnit
+from tests.utils import check_type
+
+TYPE_ARGS = {
+    types.ApplicationId: "appdev/app",
+    types.CollectionId: "collectionid",
+    types.DatasetId: "dataset-id",
+    types.Datetime: datetime.now().isoformat(),
+    types.DeprecatedLicenseId: "AGPL-1.0",
+    types.Doi: "10.5281/zenodo.5764892",
+    types.Identifier: "id",
+    types.IdentifierAnno: "id",
+    types.ImportantFileSource: "README.md",
+    types.LicenseId: "MIT",
+    types.LowerCaseIdentifier: "id",
+    types.LowerCaseIdentifierAnno: "id",
+    types.ModelId: "modelid",
+    types.NotebookId: "notebookid",
+    types.OrcidId: "0000-0001-2345-6789",
+    types.RelativeFilePath: Path(__file__).relative_to(Path().absolute()),
+    types.ResourceId: "resource-id",
+    types.SiUnit: "kg",
+    types.AbsoluteDirectory: str(Path(__file__).absolute().parent),
+    types.AbsoluteFilePath: str(Path(__file__).absolute()),
+    types.FileName: "lala.py",
+    types.Version: "1.0",
+    types.HttpUrl: "http://example.com",
+    types.Sha256: "0" * 64,
+}
+
+IGNORE_TYPES_MEMBERS = {
+    "AfterValidator",
+    "annotated_types",
+    "Annotated",
+    "annotations",
+    "Any",
+    "BeforeValidator",
+    "datetime",
+    "field_validation",
+    "FormatVersionPlaceholder",  # a literal
+    "ImportantFileSource",  # an annotated union
+    "iskeyword",
+    "isoparse",
+    "Literal",
+    "NotEmpty",
+    "PlainSerializer",
+    "RootModel",
+    "Sequence",
+    "StringConstraints",
+    "TypeVar",
+    "typing",
+    "Union",
+    "ValidatedString",
+    "YamlValue",
+}
+
+
+@pytest.mark.parametrize(
+    "name",
+    [
+        name
+        for name in dir(types)
+        if not name.startswith("_") and name not in IGNORE_TYPES_MEMBERS
+    ],
+)
+def test_type_is_usable(name: str):
+    """Check that a type can be instantiated or is a common Python type
+    (e.g. Union or Literal)."""
+    typ = getattr(types, name)
+    if typ in TYPE_ARGS:
+        args = TYPE_ARGS[typ]
+        if not isinstance(args, tuple):
+            args = (args,)
+        _ = typ(*args)
+    elif isinstance(typ, str):
+        pass  # ignore string constants
+    else:
+        origin = typing.get_origin(typ)
+        assert origin in (dict, list, typing.Union, typing.Literal) or type(typ) in (
+            typing.TypeVar,
+        ), name
+
+
+@pytest.mark.parametrize("path", [Path(__file__), Path()])
+def test_relative_path(path: Path):
+    with pytest.raises(ValueError):
+        _ = RelativeFilePath(path.absolute())
+
+    with pytest.raises(ValueError):
+        _ = RelativeFilePath(
+            str(path.absolute())  # pyright: ignore[reportArgumentType]
+        )
+
+    with pytest.raises(ValueError):
+        _ = RelativeFilePath(
+            path.absolute().as_posix()  # pyright: ignore[reportArgumentType]
+        )
+
+
+@pytest.mark.parametrize("value", ["lx·s", "kg/m^2·s^-2"])
+def test_si_unit(value: str):
+    check_type(SiUnit, value)
+
+
+@pytest.mark.parametrize("value", ["lxs", " kg"])
+def test_si_unit_invalid(value: str):
+    check_type(SiUnit, value, is_invalid=True)
+
+
+@pytest.mark.parametrize(
+    "value,expected",
+    [
+        (
+            "2019-12-11T12:22:32+00:00",
+            isoparse("2019-12-11T12:22:32+00:00"),
+        ),
+        (
+            "2019-12-11T12:22:32",
+            datetime(2019, 12, 11, 12, 22, 32),
+        ),
+        (
+            "2019-12-11T12:22:32-00:08",
+            isoparse("2019-12-11T12:22:32-00:08"),
+        ),
+    ],
+)
+def test_datetime(value: str, expected: datetime):
+    check_type(
+        Datetime,
+        value,
+        expected_root=expected,
+        expected_deserialized=value,
+    )
+
+
+@pytest.mark.parametrize(
+    "value",
+    [
+        "2024-03-06T14:21:34.384830",
+        "2024-03-06T14:21:34+00:00",
+        "2024-03-06T14:21:34+00:05",
+        "2024-03-06T14:21:34-00:08",
+        "2019-12-11T12:22:32Z",
+    ],
+)
+def test_datetime_more(value: str):
+    from bioimageio.spec._internal.types import (
+        _serialize_datetime_json,  # pyright: ignore[reportPrivateUsage]
+    )
+
+    root_adapter = TypeAdapter(Datetime)
+    datetime_adapter = TypeAdapter(  # pyright: ignore[reportCallIssue]
+        Annotated[
+            datetime,
+            PlainSerializer(_serialize_datetime_json, when_used="json-unless-none"),
+        ]
+    )
+
+    expected = isoparse(value)
+
+    actual_init = Datetime(expected)
+    assert actual_init.root == expected
+
+    actual_root = root_adapter.validate_python(value)
+    assert actual_root.root == expected
+    assert root_adapter.dump_python(actual_root, mode="python") == expected
+    assert root_adapter.dump_python(actual_root, mode="json") == value.replace(
+        "Z", "+00:00"
+    )
+
+    actual_datetime = datetime_adapter.validate_python(value)
+    assert actual_datetime == expected
+    assert datetime_adapter.dump_python(actual_datetime, mode="python") == expected
+    assert datetime_adapter.dump_python(actual_datetime, mode="json") == value.replace(
+        "Z", "+00:00"
+    )
+
+
+@pytest.mark.parametrize(
+    "value",
+    [
+        "2019-12-11T12:22:32+00/00",
+        "2019-12-11T12:22:32Y",
+        "2019-12-11T12:22:32Zulu",
+        "201912-11T12:22:32+00:00",
+        "now",
+        "today",
+    ],
+)
+def test_datetime_invalid(value: str):
+    check_type(Datetime, value, is_invalid=True)
diff --git a/tests/test_internal/test_utils.py b/tests/test_internal/test_utils.py
new file mode 100644
index 000000000..ba457c056
--- /dev/null
+++ b/tests/test_internal/test_utils.py
@@ -0,0 +1,51 @@
+from contextlib import nullcontext
+from typing import Any, Dict, Tuple
+
+import pytest
+
+
+@pytest.mark.parametrize(
+    "args,kwargs,valid",
+    [
+        ((1, 1, 1, 1, 1), dict(c1=1, c2=1, d=1), True),
+        ((1, 1, 1, 1, 1), dict(c1=1, c2=1), True),
+        ((1, 1, 1, 1), dict(c1=1, c2=1, d=1), True),
+        ((1, 1, 1, 1), dict(c1=1,
c2=1), True), + ((1, 1, 1, 1), dict(c1=1), False), + ((1, 1, 1), dict(c1=1, c2=1), False), + ((1, 1, 1, 1, 1), dict(c2=1), False), + ((1, 1, 1), dict(b2=1, c1=1, c2=1), True), + ((1, 1), dict(b1=1, b2=1, c1=1, c2=1), True), + ((1,), dict(a2=1, b1=1, b2=1, c1=1, c2=1), False), + ((), dict(a1=1, a2=1, b1=1, b2=1, c1=1, c2=1), False), + ], +) +def test_assert_all_params_set_explicitly( + args: Tuple[int, ...], kwargs: Dict[str, int], valid: bool +): + from bioimageio.spec._internal.utils import assert_all_params_set_explicitly + + def func( + a1: int = 0, + a2: int = 0, + /, + b1: int = 0, + b2: int = 0, + *args: Any, + c1: int = 0, + c2: int = 0, + **kwargs: Any, + ): + print(a1, a2, b1, b2, args, c1, c2, kwargs) + + func_explicit = assert_all_params_set_explicitly(func) + + func(*args, **kwargs) + + if valid: + ctxt = nullcontext() + else: + ctxt = pytest.raises(AssertionError) + + with ctxt: + func_explicit(*args, **kwargs) diff --git a/tests/test_internal/test_validate.py b/tests/test_internal/test_validate.py new file mode 100644 index 000000000..bf6ed98ad --- /dev/null +++ b/tests/test_internal/test_validate.py @@ -0,0 +1,37 @@ +from pathlib import Path + +import pytest +from pydantic import TypeAdapter, ValidationError +from typing_extensions import Annotated + +from bioimageio.spec._internal.io import WithSuffix +from bioimageio.spec._internal.types import FileSource +from bioimageio.spec._internal.validation_context import ValidationContext + + +def test_single_suffix(): + adapter = TypeAdapter(Annotated[FileSource, WithSuffix(".py", case_sensitive=True)]) + with ValidationContext(root=Path(__file__).parent): + _ = adapter.validate_python(Path(__file__).name) + + with ValidationContext(perform_io_checks=False): + _ = adapter.validate_python("https://example.com/lala.py") + _ = adapter.validate_python("https://example.com/lala.py#section") + + +def test_case_sensitive_suffix(): + adapter = TypeAdapter(Annotated[FileSource, WithSuffix(".py", case_sensitive=True)]) + with ValidationContext(perform_io_checks=False), pytest.raises(ValidationError): + _ = adapter.validate_python("https://example.com/lala.PY") + + +def test_multiple_suffix(): + adapter = TypeAdapter( + Annotated[FileSource, WithSuffix((".py", ".md"), case_sensitive=True)] + ) + with ValidationContext(root=Path(__file__).parent): + _ = adapter.validate_python(Path(__file__).name) + + with ValidationContext(perform_io_checks=False): + _ = adapter.validate_python("https://example.com/lala.py") + _ = adapter.validate_python("https://example.com/lala.md#section") diff --git a/tests/test_internal/test_version_type.py b/tests/test_internal/test_version_type.py new file mode 100644 index 000000000..53aeb934b --- /dev/null +++ b/tests/test_internal/test_version_type.py @@ -0,0 +1,19 @@ +from typing import Union + +import pytest + +from tests.utils import check_type + + +@pytest.mark.parametrize( + "value", [1, "1", 0.1, 1.0, "1.0", "0.0.1", "0.1.0", "0.1.1", "1.0.0", "1.1.1"] +) +def test_version_type(value: Union[str, int, float]): + from bioimageio.spec._internal.version_type import Version + + check_type( + Version, + value, + expected_root=value, + expected_deserialized=value, + ) diff --git a/tests/test_io.py b/tests/test_io.py index 85655b6d6..f31efd361 100644 --- a/tests/test_io.py +++ b/tests/test_io.py @@ -1,44 +1,12 @@ -import pathlib +from pathlib import Path import pytest -from bioimageio.spec.shared import yaml -SKIP_ZENODO = False -SKIP_ZENODO_REASON = "zenodo api changes" +def test_load_non_existing_rdf(): + from 
bioimageio.spec import load_description + spec_path = Path("some/none/existing/path/to/spec.model.yaml") -def test_get_resource_package_content(unet2d_nuclei_broad_latest, unet2d_nuclei_broad_url): - from bioimageio.spec import get_resource_package_content - - from_local_content = get_resource_package_content(unet2d_nuclei_broad_latest) - from_remote_content = get_resource_package_content(unet2d_nuclei_broad_url) - local_keys = set(from_local_content) - remote_keys = set(from_remote_content) - assert local_keys == remote_keys - - -@pytest.mark.skipif(SKIP_ZENODO, reason=SKIP_ZENODO_REASON) -def test_load_animal_nickname(): - from bioimageio.spec import load_raw_resource_description - from bioimageio.spec.model.v0_4.raw_nodes import Model as Model04 - - nickname = "impartial-shrimp" - model = load_raw_resource_description(nickname) - assert isinstance(model, Model04) - assert ".".join(model.format_version.split(".")[:2]) == "0.4" - assert model.config["bioimageio"]["nickname"] == nickname - - -def test_resolve_download_url(unet2d_nuclei_broad_latest): - from bioimageio.spec import load_raw_resource_description - from bioimageio.spec.model.v0_4.raw_nodes import Model as Model04 - - assert yaml is not None - data = yaml.load(unet2d_nuclei_broad_latest) - data["root_path"] = unet2d_nuclei_broad_latest.parent # set root path manually as we load from the manipulated dict - data["download_url"] = "relative_path_to.zip" - - model = load_raw_resource_description(data) - assert isinstance(model, Model04) - assert isinstance(model.download_url, pathlib.Path) + with pytest.raises(FileNotFoundError): + _ = load_description(spec_path) diff --git a/tests/test_licenses.py b/tests/test_licenses.py deleted file mode 100644 index b74c7e159..000000000 --- a/tests/test_licenses.py +++ /dev/null @@ -1,7 +0,0 @@ -from bioimageio.spec.shared import LICENSES - - -def test_licenses_read_and_reformatted(): - """Make sure LICENSES is dict of dicts""" - assert isinstance(LICENSES, dict) - assert all(isinstance(v, dict) for k, v in LICENSES.items()) diff --git a/tests/test_model/__init__.py b/tests/test_model/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/test_model/test_v0_4.py b/tests/test_model/test_v0_4.py new file mode 100644 index 000000000..3e00ac3a0 --- /dev/null +++ b/tests/test_model/test_v0_4.py @@ -0,0 +1,531 @@ +from datetime import datetime +from typing import Any, Dict, Union + +import pytest +from pydantic import ValidationError + +from bioimageio.spec._description import validate_format +from bioimageio.spec._internal.root_url import RootHttpUrl +from bioimageio.spec._internal.validation_context import ValidationContext +from bioimageio.spec.model.v0_4 import ( + Author, + CiteEntry, + Datetime, + InputTensorDescr, + LinkedModel, + Maintainer, + ModelDescr, + OnnxWeightsDescr, + OutputTensorDescr, + PostprocessingDescr, + PreprocessingDescr, + ScaleLinearKwargs, + ScaleMeanVarianceDescr, + ScaleRangeDescr, + ScaleRangeKwargs, + TensorName, + WeightsDescr, +) +from tests.conftest import UNET2D_ROOT +from tests.utils import check_node, check_type, unset + + +def test_linked_model_lala(): + check_node( + LinkedModel, + dict(id="lala"), + expected_dump_json=dict(id="lala"), + expected_dump_python=dict(id="lala"), + ) + + +@pytest.mark.parametrize( + "kwargs", + [ + dict(url="https://example.com"), + dict(id="lala", uri="https://example.com"), + ], +) +def test_linked_model_invalid(kwargs: Dict[str, Any]): + check_node(LinkedModel, kwargs, is_invalid=True) + + 
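+# As used throughout these tests, `check_node` and `check_type` (from
+# tests/utils.py) validate a raw input against a node class or type,
+# optionally compare the serialized result via
+# `expected_dump_json`/`expected_dump_python`, and, when called with
+# `is_invalid=True`, assert that validation fails instead.
+
+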
+@pytest.mark.parametrize( + "kwargs,expected", + [ + ( + dict(source=UNET2D_ROOT / "weights.onnx", sha256="s" * 64), + dict(source=UNET2D_ROOT / "weights.onnx", sha256="s" * 64), + ), + ( + dict(opset_version=5, source=UNET2D_ROOT / "weights.onnx", sha256="s" * 64), + ValidationError, + ), + ( + dict(source=UNET2D_ROOT / "weights.onnx", sha256="s"), + ValidationError, + ), + ], +) +def test_onnx_entry(kwargs: Dict[str, Any], expected: Union[Dict[str, Any], bool]): + check_node( + OnnxWeightsDescr, + kwargs, + expected_dump_json=expected if isinstance(expected, dict) else unset, + is_invalid=expected is ValidationError, + context=ValidationContext(perform_io_checks=False), + ) + + +VALID_PRE_AND_POSTPROCESSING = [ + dict(name="binarize", kwargs={"threshold": 0.5}), + dict(name="clip", kwargs={"min": 0.2, "max": 0.5}), + dict(name="scale_linear", kwargs={"gain": 2.0, "offset": 0.5, "axes": "xy"}), + dict(name="sigmoid"), + dict( + name="zero_mean_unit_variance", + kwargs={"mode": "fixed", "mean": 1.0, "std": 2.0, "axes": "xy"}, + ), + dict(name="scale_range", kwargs={"mode": "per_sample", "axes": "xy"}), + dict( + name="scale_range", + kwargs={ + "mode": "per_sample", + "axes": "xy", + "min_percentile": 5, + "max_percentile": 50, + }, + ), +] + +INVALID_PRE_AND_POSTPROCESSING = [ + dict(kwargs={"threshold": 0.5}), + dict(name="binarize", kwargs={"mode": "fixed", "threshold": 0.5}), + dict(name="clip", kwargs={"min": "min", "max": 0.5}), + dict(name="scale_linear", kwargs={"gain": 2.0, "offset": 0.5, "axes": "b"}), + dict(name="sigmoid", kwargs={"axes": "x"}), + dict( + name="zero_mean_unit_variance", + kwargs={"mode": "unknown", "mean": 1.0, "std": 2.0, "axes": "xy"}, + ), + dict(name="scale_range", kwargs={"mode": "fixed", "axes": "xy"}), + dict( + name="scale_range", + kwargs={ + "mode": "per_sample", + "axes": "xy", + "min_percentile": 50, + "max_percentile": 50, + }, + ), + dict(name="scale_range", kwargs={"mode": "per_sample", "axes": "xy", "min": 0}), +] + + +@pytest.mark.parametrize( + "kwargs", + VALID_PRE_AND_POSTPROCESSING + + [ + dict( + name="scale_range", + kwargs={ + "mode": "per_dataset", + "axes": "xy", + "reference_tensor": "some_input_tensor_name", + }, + ), + ], +) +def test_preprocessing(kwargs: Dict[str, Any]): + check_type(PreprocessingDescr, kwargs, expected_deserialized=kwargs) + + +@pytest.mark.parametrize("kwargs", INVALID_PRE_AND_POSTPROCESSING) +def test_invalid_preprocessing(kwargs: Dict[str, Any]): + check_type(PreprocessingDescr, kwargs, is_invalid=True) + + +@pytest.mark.parametrize( + "kwargs", + VALID_PRE_AND_POSTPROCESSING + + [ + dict(name="scale_range", kwargs={"mode": "per_sample", "axes": "xy"}), + dict( + name="scale_range", + kwargs={ + "mode": "per_dataset", + "axes": "xy", + "reference_tensor": "some_input_tensor_name", + }, + ), + dict( + name="scale_mean_variance", + kwargs={"mode": "per_sample", "reference_tensor": "some_tensor_name"}, + ), + dict( + name="scale_mean_variance", + kwargs={"mode": "per_dataset", "reference_tensor": "some_tensor_name"}, + ), + ], +) +def test_postprocessing(kwargs: Dict[str, Any]): + check_type(PostprocessingDescr, kwargs, expected_deserialized=kwargs) + + +@pytest.mark.parametrize( + "node,expected", + [ + ( + ScaleRangeDescr(kwargs=ScaleRangeKwargs(mode="per_sample", axes="xy")), + dict(name="scale_range", kwargs={"mode": "per_sample", "axes": "xy"}), + ), + ( + ScaleMeanVarianceDescr(kwargs={"mode": "per_dataset", "reference_tensor": "some_tensor_name"}), # type: ignore + dict( + name="scale_mean_variance", + 
kwargs={"mode": "per_dataset", "reference_tensor": "some_tensor_name"}, + ), + ), + ], +) +def test_postprocessing_node_input(node: Any, expected: Dict[str, Any]): + check_type(PostprocessingDescr, node, expected_deserialized=expected) + + +@pytest.mark.parametrize( + "kwargs", + INVALID_PRE_AND_POSTPROCESSING + + [ + dict(name="scale_mean_variance", kwargs={"mode": "per_sample"}), + dict(name="scale_mean_variance", kwargs={"mode": "per_dataset"}), + ], +) +def test_invalid_postprocessing(kwargs: Dict[str, Any]): + check_type(PostprocessingDescr, kwargs, is_invalid=True) + + +@pytest.mark.parametrize( + "kwargs,valid", + [ + (dict(axes="xy", gain=2.0, offset=0.5), True), + (dict(offset=2.0), True), + (dict(gain=2.0), True), + (dict(axes="xy", gain=[1.0, 2.0], offset=[0.5, 0.3]), True), + (dict(gain=2.0, offset=0.5), True), + (dict(), False), # type: ignore + (dict(gain=1.0), False), + (dict(offset=0.0), False), + ], +) +def test_scale_linear_kwargs(kwargs: Dict[str, Any], valid: bool): + check_node(ScaleLinearKwargs, kwargs, is_invalid=not valid) + + +@pytest.mark.parametrize( + "kwargs", + [ + { + "name": "input_1", + "description": "Input 1", + "data_type": "float32", + "axes": "xyc", + "shape": [128, 128, 3], + "preprocessing": [ + { + "name": "scale_range", + "kwargs": { + "max_percentile": 99, + "min_percentile": 5, + "mode": "per_sample", + "axes": "xy", + }, + } + ], + }, + { + "name": "input_1", + "description": "Input 1", + "data_type": "float32", + "axes": "xyc", + "shape": [128, 128, 3], + }, + { + "name": "tensor_1", + "data_type": "float32", + "axes": "xyc", + "shape": [128, 128, 3], + }, + ], +) +def test_input_tensor(kwargs: Dict[str, Any]): + check_node(InputTensorDescr, kwargs) + + +@pytest.mark.parametrize( + "kwargs", + [ + { + "name": "output_1", + "description": "Output 1", + "data_type": "float32", + "axes": "xyc", + "shape": [128, 128, 3], + "postprocessing": [ + { + "name": "scale_range", + "kwargs": { + "max_percentile": 99, + "min_percentile": 5, + "mode": "per_sample", + "axes": "xy", + }, + } + ], + }, + { + "name": "output_1", + "description": "Output 1", + "data_type": "float32", + "axes": "xyc", + "shape": [128, 128, 3], + }, + { + "name": "tensor_1", + "data_type": "float32", + "axes": "xyc", + "shape": [128, 128, 3], + }, + ], +) +def test_output_tensor(kwargs: Dict[str, Any]): + check_node(OutputTensorDescr, kwargs) + + +@pytest.fixture +def model_data(): + with ValidationContext(perform_io_checks=False): + return ModelDescr( + documentation=UNET2D_ROOT / "README.md", + license="MIT", + git_repo="https://github.com/bioimage-io/python-bioimage-io", + description="description", + authors=[ + Author(name="Author 1", affiliation="Affiliation 1"), + Author(name="Author 2"), + ], + maintainers=[ + Maintainer( + name="Maintainer 1", + affiliation="Affiliation 1", + github_user="fynnbe", + ), + Maintainer(github_user="constantinpape"), + ], + timestamp=Datetime(datetime.now()), + cite=[CiteEntry(text="Paper title", url="https://example.com/")], + inputs=[ + InputTensorDescr( + name=TensorName("input_1"), + description="Input 1", + data_type="float32", + axes="xyc", + shape=(128, 128, 3), + ), + ], + outputs=[ + OutputTensorDescr( + name=TensorName("output_1"), + description="Output 1", + data_type="float32", + axes="xyc", + shape=(128, 128, 3), + ), + ], + name="Model", + tags=[], + weights=WeightsDescr( + onnx=OnnxWeightsDescr(source=UNET2D_ROOT / "weights.onnx") + ), + test_inputs=[UNET2D_ROOT / "test_input.npy"], + test_outputs=[UNET2D_ROOT / 
"test_output.npy"], + type="model", + ).model_dump(mode="json") + + +@pytest.mark.parametrize( + "update", + [ + dict(run_mode={"name": "special_run_mode", "kwargs": dict(marathon=True)}), + dict(name="ยต-unicode-model!"), + dict(weights={"torchscript": {"source": "local_weights"}}), + dict(weights={"keras_hdf5": {"source": "local_weights"}}), + dict(weights={"tensorflow_js": {"source": "local_weights"}}), + dict(weights={"tensorflow_saved_model_bundle": {"source": "local_weights"}}), + dict(weights={"onnx": {"source": "local_weights"}}), + dict( + weights={ + "pytorch_state_dict": { + "source": "local_weights", + "architecture": "file.py:Model", + "architecture_sha256": "0" * 64, + } + } + ), + ], +) +def test_model(model_data: Dict[str, Any], update: Dict[str, Any]): + model_data.update(update) + summary = validate_format( + model_data, context=ValidationContext(perform_io_checks=False) + ) + assert summary.status == "passed", summary.format() + + +def test_warn_long_name(model_data: Dict[str, Any]): + model_data["name"] = ( + "veeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeery loooooooooooooooong name" + ) + summary = validate_format( + model_data, context=ValidationContext(perform_io_checks=False) + ) + assert summary.status == "passed", summary.format() + assert summary.details[1].warnings[0].loc == ("name",), summary.format() + assert summary.details[1].warnings[0].msg == "Name longer than 64 characters." + + +def test_model_schema_raises_invalid_input_name(model_data: Dict[str, Any]): + model_data["inputs"][0]["name"] = "invalid/name" + summary = validate_format( + model_data, + context=ValidationContext( + root=RootHttpUrl("http://example.com/"), perform_io_checks=False + ), + ) + assert summary.status == "failed", summary.format() + + +def test_output_fixed_shape_too_small(model_data: Dict[str, Any]): + model_data["outputs"] = [ + { + "name": "output_1", + "description": "Output 1", + "data_type": "float32", + "axes": "xyc", + "shape": [128, 128, 3], + "halo": [32, 128, 0], + } + ] + + summary = validate_format( + model_data, + context=ValidationContext( + root=RootHttpUrl("http://example.com/"), perform_io_checks=False + ), + ) + assert summary.status == "failed", summary.format() + + +def test_output_ref_shape_mismatch(model_data: Dict[str, Any]): + model_data["outputs"] = [ + { + "name": "output_1", + "description": "Output 1", + "data_type": "float32", + "axes": "xyc", + "shape": { + "reference_tensor": "input_1", + "scale": [1, 2, 3, 4], + "offset": [0, 0, 0, 0], + }, + } + ] + + summary = validate_format( + model_data, + context=ValidationContext( + root=RootHttpUrl("http://example.com/"), perform_io_checks=False + ), + ) + assert summary.status == "failed", summary.format() + + +def test_output_ref_shape_too_small(model_data: Dict[str, Any]): + model_data["outputs"] = [ + { + "name": "output_1", + "description": "Output 1", + "data_type": "float32", + "axes": "xyc", + "shape": { + "reference_tensor": "input_1", + "scale": [1, 2, 3], + "offset": [0, 0, 0], + }, + "halo": [256, 128, 0], + } + ] + summary = validate_format( + model_data, + context=ValidationContext( + root=RootHttpUrl("http://example.com/"), perform_io_checks=False + ), + ) + assert summary.status == "failed", summary.format() + + +def test_model_has_parent_with_id(model_data: Dict[str, Any]): + model_data["parent"] = dict(id="10.5281/zenodo.5764892") + summary = validate_format( + model_data, + context=ValidationContext( + root=RootHttpUrl("http://example.com/"), perform_io_checks=False + ), + ) + assert 
summary.status == "passed", summary.format() + + +def test_model_with_expanded_output(model_data: Dict[str, Any]): + model_data["outputs"] = [ + { + "name": "output_1", + "description": "Output 1", + "data_type": "float32", + "axes": "xyzc", + "shape": dict( + scale=[1, 1, None, 1], + offset=[0, 0, 7, 0], + reference_tensor="input_1", + ), + } + ] + + summary = validate_format( + model_data, + context=ValidationContext( + root=RootHttpUrl("http://example.com/"), perform_io_checks=False + ), + ) + assert summary.status == "passed", summary.format() + + +def test_model_rdf_is_valid_general_rdf(model_data: Dict[str, Any]): + model_data["type"] = "model_as_generic" + model_data["format_version"] = "0.2.4" + summary = validate_format( + model_data, + context=ValidationContext( + root=RootHttpUrl("http://example.com/"), perform_io_checks=False + ), + ) + assert summary.status == "passed", summary.format() + + +def test_model_does_not_accept_unknown_fields(model_data: Dict[str, Any]): + model_data["unknown_additional_field"] = "shouldn't be here" + summary = validate_format( + model_data, + context=ValidationContext( + root=RootHttpUrl("http://example.com/"), perform_io_checks=False + ), + ) + assert summary.status == "failed", summary.format() diff --git a/tests/test_model/test_v0_5.py b/tests/test_model/test_v0_5.py new file mode 100644 index 000000000..d13e976d5 --- /dev/null +++ b/tests/test_model/test_v0_5.py @@ -0,0 +1,434 @@ +from datetime import datetime +from typing import Any, Dict, Union + +import pytest + +from bioimageio.spec import validate_format +from bioimageio.spec._internal.io import FileDescr +from bioimageio.spec._internal.license_id import LicenseId +from bioimageio.spec._internal.url import HttpUrl +from bioimageio.spec._internal.validation_context import ValidationContext +from bioimageio.spec.model.v0_5 import ( + Author, + AxisId, + BatchAxis, + ChannelAxis, + CiteEntry, + Datetime, + Identifier, + InputAxis, + InputTensorDescr, + IntervalOrRatioDataDescr, + Maintainer, + ModelDescr, + OnnxWeightsDescr, + OutputTensorDescr, + SpaceInputAxis, + SpaceOutputAxis, + TensorDescrBase, + TensorId, + WeightsDescr, +) +from tests.conftest import UNET2D_ROOT +from tests.utils import check_node, check_type + + +@pytest.mark.parametrize( + "kwargs", + [ + dict( + id="t0", + test_tensor={"source": UNET2D_ROOT / "test_input.npy"}, + data={"values": ["cat", "dog", "parrot"]}, + axes=[{"type": "channel", "channel_names": ["animal"]}], + ), + dict( + id="t1", + test_tensor={"source": UNET2D_ROOT / "test_input.npy"}, + data=[ + {"values": ["cat", "dog", "parrot"]}, + {"values": ["mouse", "zebra", "elephant"]}, + ], + axes=[{"type": "channel", "channel_names": ["animal", "other_animal"]}], + ), + dict( + id="t2", + test_tensor={"source": UNET2D_ROOT / "test_input.npy"}, + data=[ + {"values": [1, 2, 3]}, + {"type": "uint8"}, + ], + axes=[ + {"type": "channel", "channel_names": ["animal_code", "animal_count"]} + ], + ), + pytest.param( + dict( + id="t3", + test_tensor={"source": UNET2D_ROOT / "test_input.npy"}, + data=[ + {"values": ["mouse", "zebra", "elephant"]}, + {"type": "uint8"}, + ], + axes=[{"type": "channel", "channel_names": ["animal_code", "count"]}], + ), + id="string values and uint data type", + ), + ], +) +def test_tensor_base(kwargs: Dict[str, Any]): + check_node( + TensorDescrBase, kwargs, context=ValidationContext(perform_io_checks=False) + ) + + +@pytest.mark.parametrize( + "kwargs", + [ + pytest.param( + dict( + id="t5", + test_tensor={"source": UNET2D_ROOT / 
"test_input.npy"}, + data=[ + {"values": ["cat", "dog", "parrot"]}, + {"values": [1.1, 2.2, 3.3]}, + ], + ), + id="str and float values", + ), + pytest.param( + dict( + id="t7", + test_tensor={"source": UNET2D_ROOT / "test.npy"}, + data=[ + {"values": ["mouse", "zebra", "elephant"]}, + {"type": "int8"}, + ], + ), + id="string values and int data type", + ), + ], +) +def test_tensor_base_invalid(kwargs: Dict[str, Any]): + check_node( + TensorDescrBase, + kwargs, + is_invalid=True, + context=ValidationContext(perform_io_checks=False), + ) + + +@pytest.mark.parametrize( + "kwargs", + [ + { + "id": "input_1", + "description": "Input 1", + "data": {"type": "float32"}, + "axes": [ + dict(type="space", id="x", size=10), + dict(type="space", id="y", size=11), + dict(type="channel", channel_names=tuple("abc")), + ], + "preprocessing": [ + { + "id": "scale_range", + "kwargs": { + "max_percentile": 99, + "min_percentile": 5, + "axes": ("x", "y"), + }, + } + ], + "test_tensor": {"source": UNET2D_ROOT / "test_input.npy"}, + }, + ], +) +def test_input_tensor(kwargs: Dict[str, Any]): + check_node( + InputTensorDescr, kwargs, context=ValidationContext(perform_io_checks=False) + ) + + +@pytest.mark.parametrize( + "kwargs", + [ + pytest.param( + dict( + id="input_2", + test_tensor={"source": UNET2D_ROOT / "test.npy"}, + data=[ + {"values": ["cat", "dog", "parrot"]}, + {"values": ["mouse", "zebra", "elephant"]}, + ], + axes=[{"type": "channel", "channel_names": ["a", "b", "c"]}], + ), + id="channel mismatch", + ), + ], +) +def test_input_tensor_invalid(kwargs: Dict[str, Any]): + check_node( + InputTensorDescr, + kwargs, + is_invalid=True, + context=ValidationContext(perform_io_checks=False), + ) + + +@pytest.mark.parametrize( + "kwargs", + [{}, {"type": "batch"}], +) +def test_batch_axis(kwargs: Dict[str, Any]): + check_node( + BatchAxis, + kwargs, + expected_dump_python={ + "type": "batch", + "name": "batch", + "description": "", + "size": None, + }, + ) + + +@pytest.mark.parametrize( + "kwargs", + [ + {"type": "space", "id": "x", "size": 10}, + SpaceInputAxis(id=AxisId("x"), size=10), + {"type": "batch"}, + ], +) +def test_input_axis(kwargs: Union[Dict[str, Any], SpaceInputAxis]): + check_type(InputAxis, kwargs) + + +@pytest.fixture +def model_data(): + with ValidationContext(perform_io_checks=False): + model = ModelDescr( + documentation=UNET2D_ROOT / "README.md", + license=LicenseId("MIT"), + git_repo=HttpUrl("https://github.com/bioimage-io/python-bioimage-io"), + format_version="0.5.0", + description="description", + authors=[ + Author(name="Author 1", affiliation="Affiliation 1"), + Author(name="Author 2"), + ], + maintainers=[ + Maintainer( + name="Maintainer 1", + affiliation="Affiliation 1", + github_user="fynnbe", + ), + Maintainer(github_user="githubuser2"), + ], + timestamp=Datetime(datetime.now()), + cite=[CiteEntry(text="Paper title", url=HttpUrl("https://example.com/"))], + inputs=[ + InputTensorDescr( + id=TensorId("input_1"), + description="Input 1", + data=IntervalOrRatioDataDescr(type="float32"), + axes=[ + BatchAxis(), + ChannelAxis(channel_names=[Identifier("intensity")]), + SpaceInputAxis(id=AxisId("x"), size=512), + SpaceInputAxis(id=AxisId("y"), size=512), + ], + test_tensor=FileDescr(source=UNET2D_ROOT / "test_input.npy"), + ), + ], + outputs=[ + OutputTensorDescr( + id=TensorId("output_1"), + description="Output 1", + axes=[ + BatchAxis(), + ChannelAxis(channel_names=[Identifier("intensity")]), + SpaceOutputAxis(id=AxisId("x"), size=512), + SpaceOutputAxis(id=AxisId("y"), size=512), 
+                    ],
+                    test_tensor=FileDescr(source=UNET2D_ROOT / "test_output.npy"),
+                ),
+            ],
+            name="Model",
+            tags=[],
+            weights=WeightsDescr(
+                onnx=OnnxWeightsDescr(
+                    source=UNET2D_ROOT / "weights.onnx", opset_version=15
+                )
+            ),
+            type="model",
+        )
+        data = model.model_dump(mode="json")
+        assert data["documentation"] == str(UNET2D_ROOT / "README.md"), (
+            data["documentation"],
+            str(UNET2D_ROOT / "README.md"),
+        )
+        return data
+
+
+@pytest.mark.parametrize(
+    "update",
+    [
+        pytest.param(dict(name="µ-unicode-model/name!"), id="unicode name"),
+        dict(run_mode={"name": "special_run_mode", "kwargs": dict(marathon=True)}),
+        dict(
+            weights={
+                "torchscript": {
+                    "source": UNET2D_ROOT / "weights.onnx",
+                    "pytorch_version": 1.15,
+                }
+            }
+        ),
+        dict(
+            weights={
+                "keras_hdf5": {
+                    "source": UNET2D_ROOT / "weights.onnx",
+                    "tensorflow_version": 1.10,
+                }
+            }
+        ),
+        dict(
+            weights={
+                "tensorflow_js": {
+                    "source": UNET2D_ROOT / "weights.onnx",
+                    "tensorflow_version": 1.10,
+                }
+            }
+        ),
+        dict(
+            weights={
+                "tensorflow_saved_model_bundle": {
+                    "source": UNET2D_ROOT / "weights.onnx",
+                    "tensorflow_version": 1.10,
+                }
+            }
+        ),
+        dict(
+            weights={
+                "onnx": {"source": UNET2D_ROOT / "weights.onnx", "opset_version": 15}
+            }
+        ),
+        dict(
+            weights={
+                "pytorch_state_dict": {
+                    "source": UNET2D_ROOT / "weights.onnx",
+                    "pytorch_version": "1.15",
+                    "architecture": {
+                        "callable": "Model",
+                        "source": "https://example.com/file.py",
+                        "sha256": "0" * 64,  # dummy sha256
+                    },
+                },
+            }
+        ),
+    ],
+)
+def test_model(model_data: Dict[str, Any], update: Dict[str, Any]):
+    model_data.update(update)
+    summary = validate_format(
+        model_data, context=ValidationContext(perform_io_checks=False)
+    )
+    assert summary.status == "passed", summary.format()
+
+
+def test_warn_long_name(model_data: Dict[str, Any]):
+    model_data["name"] = (
+        "veeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeery loooooooooooooooong name"
+    )
+    summary = validate_format(model_data)
+
+    assert summary.status == "passed", summary.format()
+    assert summary.details[1].warnings[0].loc == ("name",), summary.format()
+    assert summary.details[1].warnings[0].msg == "Name longer than 64 characters."
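+
+
+# The tests below mutate the valid `model_data` fixture in place and re-run
+# `validate_format`, expecting a "failed" summary for illegal manipulations
+# and "passed" where the change is allowed.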
+ + +def test_model_schema_raises_invalid_input_id(model_data: Dict[str, Any]): + model_data["inputs"][0]["id"] = "invalid/id" + summary = validate_format(model_data) + assert summary.status == "failed", summary.format() + + +def test_output_fixed_shape_too_small(model_data: Dict[str, Any]): + model_data["outputs"][0]["halo"] = 999 + summary = validate_format(model_data) + assert summary.status == "failed", summary.format() + + +def test_output_ref_shape_mismatch(model_data: Dict[str, Any]): + model_data["outputs"][0]["axes"][2] = { + "type": "space", + "id": "x", + "size": {"tensor_id": "input_1", "axis_id": "x"}, + "halo": 2, + } + summary = validate_format(model_data) + assert summary.status == "passed", summary.format() + # input_1.x -> input_1.z + model_data["outputs"][0]["axes"][2] = { + "type": "space", + "id": "x", + "size": {"tensor_id": "input_1", "axis_id": "z"}, + "halo": 2, + } + summary = validate_format( + model_data, context=ValidationContext(perform_io_checks=False) + ) + assert summary.status == "failed", summary.format() + + +def test_output_ref_shape_too_small(model_data: Dict[str, Any]): + model_data["outputs"][0]["axes"][2] = { + "type": "space", + "id": "x", + "size": {"tensor_id": "input_1", "axis_id": "x"}, + "halo": 2, + } + summary = validate_format( + model_data, context=ValidationContext(perform_io_checks=False) + ) + assert summary.status == "passed", summary.format() + + model_data["outputs"][0]["axes"][2]["halo"] = 999 + summary = validate_format( + model_data, context=ValidationContext(perform_io_checks=False) + ) + assert summary.status == "failed", summary.format() + + +def test_model_has_parent_with_id(model_data: Dict[str, Any]): + model_data["parent"] = dict(id="10.5281/zenodo.5764892", version_number=1) + summary = validate_format(model_data) + assert summary.status == "passed", summary.format() + + +def test_model_with_expanded_output(model_data: Dict[str, Any]): + model_data["outputs"][0]["axes"] = [ + {"type": "space", "id": "x", "size": {"tensor_id": "input_1", "axis_id": "x"}}, + {"type": "space", "id": "y", "size": {"tensor_id": "input_1", "axis_id": "y"}}, + {"type": "space", "id": "z", "size": 7}, + {"type": "channel", "channel_names": list("abc")}, + ] + + summary = validate_format( + model_data, context=ValidationContext(perform_io_checks=False) + ) + assert summary.status == "passed", summary.format() + + +def test_model_rdf_is_valid_general_rdf(model_data: Dict[str, Any]): + model_data["type"] = "model_as_generic" + model_data["format_version"] = "0.3.0" + summary = validate_format(model_data) + assert summary.status == "passed", summary.format() + + +def test_model_does_not_accept_unknown_fields(model_data: Dict[str, Any]): + model_data["unknown_additional_field"] = "shouldn't be here" + summary = validate_format(model_data) + assert summary.status == "failed", summary.format() diff --git a/tests/test_node_transformer.py b/tests/test_node_transformer.py deleted file mode 100644 index 0c104601b..000000000 --- a/tests/test_node_transformer.py +++ /dev/null @@ -1,107 +0,0 @@ -from dataclasses import dataclass -from pathlib import Path -from typing import Any - -import pytest -from packaging.version import Version - -from bioimageio.spec.shared import raw_nodes -from bioimageio.spec.shared.node_transformer import NodeTransformer, NodeVisitor, iter_fields -from bioimageio.spec.shared.raw_nodes import ResourceDescription - - -@dataclass -class MyNode(raw_nodes.RawNode): - field_a: str - field_b: int - - -def test_iter_fields(): - entry = 
MyNode("a", 42) - assert [("field_a", "a"), ("field_b", 42)] == list(iter_fields(entry)) - - -@dataclass -class Content: - data: str - - -class TestNodeVisitor: - @dataclass - class Tree(raw_nodes.RawNode): - left: Any - right: Any - - @dataclass - class URL: - url: str - - @pytest.fixture - def tree(self): - return self.Tree( - self.Tree(self.Tree(None, None), self.URL("https://example.com")), - self.Tree(None, self.Tree(None, self.Tree(None, None))), - ) - - def test_node(self, tree): - visitor = NodeVisitor() - visitor.visit(tree) - - def test_node_transform(self, tree): - class MyTransformer(NodeTransformer): - def transform_URL(self, node): - return Content(f"content of url {node.url}") - - assert isinstance(tree.left.right, self.URL) - transformer = MyTransformer() - transformed_tree = transformer.transform(tree) - assert isinstance(transformed_tree.left.right, Content) - - -def test_resolve_remote_relative_path(): - from bioimageio.spec.shared.node_transformer import RelativePathTransformer - - remote_rdf = raw_nodes.URI( - "https://raw.githubusercontent.com/bioimage-io/spec-bioimage-io/main/example_specs/models/" - "unet2d_nuclei_broad/rdf.yaml" - ) - remote_relative_path = Path("unet2d.py") - - uri = RelativePathTransformer(root=remote_rdf.parent).transform(remote_relative_path) - - assert ( - str(uri) == "https://raw.githubusercontent.com/bioimage-io/spec-bioimage-io/main/example_specs/models/" - "unet2d_nuclei_broad/unet2d.py" - ) - - -@pytest.mark.parametrize( - "data_update_expected", - [ - ({"a": 1, "b": 2}, {"a": {"c": 1}}, {"a": {"c": 1}, "b": 2}), - ({"a": [0, 1, 2, 3]}, {"a": [5]}, {"a": [5, 1, 2, 3]}), - ([0, 1, 2, 3], [5], [5, 1, 2, 3]), - ([0, {"a": [1]}, 2, 3], ["DROP", {"a": ["KEEP", 2]}], [{"a": [1, 2]}, 2, 3]), - ( - ResourceDescription( - format_version="0.1.0", - name="resource", - type="test", - version=Version("0.1.0"), - ), - {"name": "updated resource"}, - ResourceDescription( - format_version="0.1.0", - name="updated resource", - type="test", - version=Version("0.1.0"), - ), - ), - ], -) -def test_update_nested(data_update_expected): - from bioimageio.spec.shared import update_nested - - data, update, expected = data_update_expected - actual = update_nested(data, update) - assert actual == expected diff --git a/tests/test_package.py b/tests/test_package.py new file mode 100644 index 000000000..abb8f33ed --- /dev/null +++ b/tests/test_package.py @@ -0,0 +1,45 @@ +import shutil +from pathlib import Path +from tempfile import TemporaryDirectory + +from bioimageio.spec.model import v0_5 + + +def test_save_bioimageio_package(unet2d_path: Path): + from bioimageio.spec import save_bioimageio_package + + package_path = save_bioimageio_package(unet2d_path) + assert package_path.exists() + + +def test_save_bioimageio_package_as_folder(unet2d_path: Path): + from bioimageio.spec import load_description, save_bioimageio_package_as_folder + + with TemporaryDirectory() as tmp_dir: + tmp_dir = Path(tmp_dir) + + package_folder = tmp_dir / "package" + _ = save_bioimageio_package_as_folder(unet2d_path, output_path=package_folder) + + # load package + model = load_description(package_folder) + assert isinstance(model, v0_5.ModelDescr) + + # alter package + doc = model.documentation + assert isinstance(doc, v0_5.RelativeFilePath) + new_doc = f"copy_{doc}" + shutil.move(str(package_folder / str(doc)), str(package_folder / new_doc)) + model.documentation = package_folder / new_doc + + # export altered package + altered_package = tmp_dir / "altered_package" + altered_package = 
save_bioimageio_package_as_folder( + model, output_path=altered_package, weights_priority_order=["onnx"] + ) + assert (altered_package / new_doc).exists(), altered_package / new_doc + + # load altered package + reloaded_model = load_description(altered_package) + assert isinstance(reloaded_model, v0_5.ModelDescr) + assert str(reloaded_model.documentation).startswith("copy_") diff --git a/tests/test_raw_load_resource_description.py b/tests/test_raw_load_resource_description.py deleted file mode 100644 index 5cfbbdb53..000000000 --- a/tests/test_raw_load_resource_description.py +++ /dev/null @@ -1,136 +0,0 @@ -import pathlib - -from bioimageio.spec.model import raw_nodes -from bioimageio.spec import collection -from bioimageio.spec.shared import yaml - - -def test_load_raw_model(unet2d_nuclei_broad_any): - from bioimageio.spec import load_raw_resource_description - - raw_model = load_raw_resource_description(unet2d_nuclei_broad_any) - assert raw_model - - -def test_load_upsample_raw_model(upsamle_model_rdf): - from bioimageio.spec import load_raw_resource_description - - raw_model = load_raw_resource_description(upsamle_model_rdf) - assert raw_model - - -def test_loaded_remote_raw_model_is_valid(unet2d_nuclei_broad_url): - from bioimageio.spec import load_raw_resource_description - - raw_model = load_raw_resource_description(unet2d_nuclei_broad_url) - raw_model = load_raw_resource_description(raw_model) - assert raw_model - - -def test_load_raw_model_fixed_shape(unet2d_fixed_shape): - from bioimageio.spec import load_raw_resource_description - - raw_model = load_raw_resource_description(unet2d_fixed_shape) - assert raw_model - - -def test_load_raw_model_diff_output_shape(unet2d_diff_output_shape): - from bioimageio.spec import load_raw_resource_description - - raw_model = load_raw_resource_description(unet2d_diff_output_shape) - assert raw_model - - -def test_load_raw_model_multi_tensor(unet2d_multi_tensor): - from bioimageio.spec import load_raw_resource_description - - raw_model = load_raw_resource_description(unet2d_multi_tensor) - assert raw_model - - -def test_load_raw_model_expanded_output_shape(unet2d_expanded_output_shape): - from bioimageio.spec import load_raw_resource_description - - raw_model = load_raw_resource_description(unet2d_expanded_output_shape) - assert raw_model - - -def test_load_raw_model_hpa(hpa_model): - from bioimageio.spec import load_raw_resource_description - - raw_model = load_raw_resource_description(hpa_model) - assert raw_model - - -def test_load_raw_model_stardist(stardist_model): - from bioimageio.spec import load_raw_resource_description - - raw_model = load_raw_resource_description(stardist_model) - assert raw_model - - -def test_load_raw_model_unet2d_keras_tf(unet2d_keras_tf): - from bioimageio.spec import load_raw_resource_description - - raw_model = load_raw_resource_description(unet2d_keras_tf, update_to_format="latest") - assert isinstance(raw_model, raw_nodes.Model) - # test attachments - assert len(raw_model.attachments.files) == 1 - attachment = raw_model.attachments.files[0] - assert isinstance(attachment, pathlib.Path) - assert (raw_model.root_path / attachment).exists() - - -def test_load_raw_model_unet2d_keras_tf2(unet2d_keras_tf2): - from bioimageio.spec import load_raw_resource_description - - raw_model = load_raw_resource_description(unet2d_keras_tf2, update_to_format="latest") - assert isinstance(raw_model, raw_nodes.Model) - # test attachments - assert len(raw_model.attachments.files) == 3 - attachments = raw_model.attachments.files - 
assert all(isinstance(at, pathlib.Path) and (raw_model.root_path / at).exists() for at in attachments) - - -def test_load_raw_model_to_format(unet2d_nuclei_broad_before_latest): - from bioimageio.spec import load_raw_resource_description - - assert yaml is not None - data = yaml.load(unet2d_nuclei_broad_before_latest) - data["root_path"] = unet2d_nuclei_broad_before_latest.parent - format_targets = [(0, 3), (0, 4)] - format_version = tuple(map(int, data["format_version"].split(".")[:2])) - - for target in format_targets: - if format_version <= target: - to_format = ".".join(map(str, target)) - raw_model = load_raw_resource_description(data, update_to_format=to_format) - assert raw_model.format_version[: raw_model.format_version.rfind(".")] == to_format - - -def test_load_raw_model_converts_invalid_name(unet2d_nuclei_broad_base_path): - from bioimageio.spec.model.raw_nodes import Model - from bioimageio.spec import load_raw_resource_description - - assert yaml is not None - model_dict = yaml.load(unet2d_nuclei_broad_base_path / "rdf_v0_4_0.yaml") - model_dict["root_path"] = unet2d_nuclei_broad_base_path - model_dict["name"] = "invalid/name" - model = load_raw_resource_description(model_dict) - assert isinstance(model, Model) - assert model.name == "invalidname" - - -def test_collection_with_relative_path_in_rdf_source_of_an_entry(partner_collection): - from bioimageio.spec import load_raw_resource_description - from bioimageio.spec.collection.utils import resolve_collection_entries - from bioimageio.spec.dataset.v0_2.raw_nodes import Dataset - - coll = load_raw_resource_description(partner_collection) - assert isinstance(coll, collection.raw_nodes.Collection) - resolved_entries = resolve_collection_entries(coll) - for entry_rdf, entry_error in resolved_entries: - assert isinstance(entry_rdf, Dataset) - assert isinstance(entry_rdf.documentation, pathlib.Path) and entry_rdf.documentation.as_posix().endswith( - "example_specs/collections/partner_collection/datasets/dummy-dataset/README.md" - ) diff --git a/tests/test_raw_nodes.py b/tests/test_raw_nodes.py deleted file mode 100644 index 737c437c4..000000000 --- a/tests/test_raw_nodes.py +++ /dev/null @@ -1,129 +0,0 @@ -import dataclasses -import pathlib -from datetime import datetime -from typing import Any, Dict - -import pytest - - -def test_uri(): - from bioimageio.spec.shared.raw_nodes import URI - - uri_from_string = URI("https://john.doe@www.example.com:123/forum/questions/?tag=networking&order=newest#top") - uri = URI( - scheme="https", - authority="john.doe@www.example.com:123", - path="/forum/questions/", - query="tag=networking&order=newest", - fragment="top", - ) - - assert str(uri_from_string) == str(uri) - assert uri_from_string == uri - - -def test_replace_uri_wo_uri_string(): - from bioimageio.spec.shared.raw_nodes import URI - - uri_string = "https://john.doe@www.example.com:123/forum/questions/?tag=networking&order=newest#top" - uri_string = uri_string.replace("top", "bottom") - - uri = URI( - scheme="https", - authority="john.doe@www.example.com:123", - path="/forum/questions/", - query="tag=networking&order=newest", - fragment="top", - ) - uri = dataclasses.replace(uri, fragment="bottom") - assert uri_string == str(uri) - - -def test_uri_is_relative_path(): - from bioimageio.spec.shared.raw_nodes import URI - - # todo: figure out if it is important to keep a trailing slash. 
- # atm uri_from_string removes it (using urllib.parse.urlparse) - # uri_from_string = URI("file:forum/questions/") - # uri = URI(scheme="file", path="forum/questions/") - - uri_from_string = URI("file:forum/questions") - uri = URI(scheme="file", path="forum/questions") - - assert str(uri_from_string) == str(uri) - assert uri_from_string == uri - - -def test_uri_is_url(): - from bioimageio.spec.shared.raw_nodes import URI - - url = "https://raw.githubusercontent.com/bioimage-io/spec-bioimage-io/main/example_specs/models/unet2d_nuclei_broad/test_input.npy" - uri = URI(url) - assert str(uri) == url - - -def test_uri_truediv(): - from bioimageio.spec.shared.raw_nodes import URI - - url = "https://raw.githubusercontent.com/bioimage-io/spec-bioimage-io/main/example_specs/models/unet2d_nuclei_broad?download=1" - rel_path = "test_input.npy" - expected = URI( - f"https://raw.githubusercontent.com/bioimage-io/spec-bioimage-io/main/example_specs/models/unet2d_nuclei_broad/{rel_path}?download=1" - ) - uri = URI(url) - assert expected == uri / rel_path - assert expected == uri / rel_path # ensure we did not change uri in-place - - -def test_uri_parent(): - from bioimageio.spec.shared.raw_nodes import URI - - url = "https://raw.githubusercontent.com/bioimage-io/spec-bioimage-io/main/example_specs/models/unet2d_nuclei_broad/test_input.npy?download=1" - expected = URI( - "https://raw.githubusercontent.com/bioimage-io/spec-bioimage-io/main/example_specs/models/unet2d_nuclei_broad?download=1" - ) - uri = URI(url) - assert expected == uri.parent - assert expected == uri.parent # ensure we did not change uri in-place - - -def test_general_rdf_accepts_unknown_fields(): - from bioimageio.spec.rdf.raw_nodes import RDF - - rdf = RDF( - format_version="0.2.0", - name="test_rdf", - authors=[], - cite=[], - description="description text", - documentation=pathlib.Path("README.md"), - links=[], - tags=[], - unknown_weird_test_field="shouldn't be here", - ) - assert rdf.name == "test_rdf" - - -def test_model_does_not_accept_unknown_fields(): - from bioimageio.spec.model.raw_nodes import Model - - model_kwargs: Dict[str, Any] = dict( - authors=[], - cite=[], - description="description text", - documentation=pathlib.Path("README.md"), - format_version="0.3.2", - inputs=[], - license="MIT", - name="test_model", - outputs=[], - tags=[], - test_inputs=[], - test_outputs=[], - timestamp=datetime.now(), - weights={}, - ) - # check that model_kwargs are valid - Model(**model_kwargs) - with pytest.raises(TypeError): - Model(**model_kwargs, unknown_weird_test_field="shouldn't be here") # type: ignore diff --git a/tests/test_raw_nodes_match_schema.py b/tests/test_raw_nodes_match_schema.py deleted file mode 100644 index 0c75265b9..000000000 --- a/tests/test_raw_nodes_match_schema.py +++ /dev/null @@ -1,44 +0,0 @@ -from inspect import getmembers - -import pytest - -from bioimageio.spec.shared import fields -from bioimageio import spec - - -@pytest.mark.parametrize( - "schema_raw_nodes_pair", - [ - (spec.model.schema, spec.model.raw_nodes), - (spec.model.v0_3.schema, spec.model.v0_3.raw_nodes), - (spec.model.v0_4.schema, spec.model.v0_4.raw_nodes), - (spec.rdf.schema, spec.rdf.raw_nodes), - (spec.rdf.v0_2.schema, spec.rdf.v0_2.raw_nodes), - ], -) -def test_model_spec(schema_raw_nodes_pair): - schema, raw_nodes = schema_raw_nodes_pair - from bioimageio.spec.shared.schema import SharedBioImageIOSchema - from bioimageio.spec.shared.raw_nodes import RawNode - - schema_names = { - name for name, cls in getmembers(schema) if 
isinstance(cls, type) and issubclass(cls, SharedBioImageIOSchema) - } - # remove SharedBioImageIOSchema from schema names - schema_names -= {SharedBioImageIOSchema.__name__} - assert schema_names # did we get any? - - node_names = {name for name, cls in getmembers(raw_nodes) if isinstance(cls, type) and issubclass(cls, RawNode)} - # remove any node_names that are fields - field_names = { - name for name, cls in getmembers(fields) if isinstance(cls, type) and issubclass(cls, fields.DocumentedField) - } - assert field_names # did we get any? - node_names -= field_names - # if present, ignore raw_nodes.ImportableModule and raw_nodes.ImportableSourceFile which are coming from - # fields.ImportableSource - node_names -= {n for n in {"ImportableModule", "ImportableSourceFile"} if hasattr(raw_nodes, n)} - - assert node_names # did we get any? - - assert node_names, schema_names diff --git a/tests/test_schema_model.py b/tests/test_schema_model.py deleted file mode 100644 index b3293c05b..000000000 --- a/tests/test_schema_model.py +++ /dev/null @@ -1,263 +0,0 @@ -from datetime import datetime - -import pytest -from marshmallow import ValidationError - -from bioimageio.spec.model.v0_4 import raw_nodes as raw_nodes_m04 -from bioimageio.spec.shared import yaml - -SKIP_ZENODO = False -SKIP_ZENODO_REASON = "zenodo api changes" - - -def test_model_rdf_is_valid_general_rdf(unet2d_nuclei_broad_latest): - from bioimageio.spec.rdf.schema import RDF - - assert yaml is not None - data = yaml.load(unet2d_nuclei_broad_latest) - data["root_path"] = unet2d_nuclei_broad_latest.parent - - RDF().load(data) - - -def test_model_does_not_accept_unknown_fields(unet2d_nuclei_broad_latest): - from bioimageio.spec.model.schema import Model - - assert yaml is not None - data = yaml.load(unet2d_nuclei_broad_latest) - data["root_path"] = unet2d_nuclei_broad_latest.parent - - data["unknown_additional_field"] = "shouldn't be here" - - with pytest.raises(ValidationError): - Model().load(data) - - -@pytest.fixture -def model_dict(): - """ - Valid model dict fixture - """ - return { - "documentation": "./docs.md", - "license": "MIT", - "git_repo": "https://github.com/bioimage-io/python-bioimage-io", - "format_version": "0.4.0", - "description": "description", - "authors": [ - {"name": "Author 1", "affiliation": "Affiliation 1"}, - {"name": "Author 2", "affiliation": "Affiliation 2"}, - ], - "maintainers": [ - {"name": "Author 1", "affiliation": "Affiliation 1", "github_user": "githubuser1"}, - {"name": "Author 2", "affiliation": "Affiliation 2", "github_user": "githubuser2"}, - ], - "timestamp": datetime.now(), - "cite": [{"text": "Paper title", "doi": "doi"}], - "inputs": [ - {"name": "input_1", "description": "Input 1", "data_type": "float32", "axes": "xyc", "shape": [128, 128, 3]} - ], - "outputs": [ - { - "name": "output_1", - "description": "Output 1", - "data_type": "float32", - "axes": "xyc", - "shape": [128, 128, 3], - } - ], - "name": "Model", - "tags": [], - "weights": {}, - "test_inputs": ["test_ipt.npy"], - "test_outputs": ["test_out.npy"], - "type": "model", - } - - -def test_model_schema_accepts_run_mode(model_dict): - from bioimageio.spec.model.schema import Model - - model_schema = Model() - model_dict.update({"run_mode": {"name": "special_run_mode", "kwargs": dict(marathon=True)}}) - validated_data = model_schema.load(model_dict) - assert validated_data - - -@pytest.mark.parametrize( - "format", - ["pytorch_state_dict", "torchscript", "keras_hdf5", "tensorflow_js", "tensorflow_saved_model_bundle", "onnx"], -) -def 
test_model_schema_accepts_valid_weight_formats(model_dict, format): - from bioimageio.spec.model.schema import Model - - model_schema = Model() - model_dict.update({"weights": {format: {"source": "local_weights"}}}) - if format == "pytorch_state_dict": - model_dict["weights"][format]["architecture"] = "file.py:Model" - model_dict["weights"][format]["architecture_sha256"] = "0" * 64 # dummy sha256 - - validated_data = model_schema.load(model_dict) - assert validated_data - - -def test_model_schema_raises_invalid_name(model_dict): - from bioimageio.spec.model.schema import Model - - model_schema = Model() - model_dict["name"] = "invalid/name" - with pytest.raises(ValidationError): - model_schema.load(model_dict) - - -def test_model_schema_raises_invalid_input_name(model_dict): - from bioimageio.spec.model.schema import Model - - model_schema = Model() - model_dict["inputs"][0]["name"] = "invalid/name" - with pytest.raises(ValidationError): - model_schema.load(model_dict) - - -def test_model_schema_raises_invalid_output_name(model_dict): - from bioimageio.spec.model.schema import Model - - model_schema = Model() - model_dict["outputs"][0]["name"] = "invalid/name" - with pytest.raises(ValidationError): - model_schema.load(model_dict) - - -def test_model_0_4_raises_on_duplicate_tensor_names(invalid_rdf_v0_4_0_duplicate_tensor_names): - from bioimageio.spec.model.schema import Model - from bioimageio.spec.model.v0_3.schema import Model as Model_v03 - - assert yaml is not None - data = yaml.load(invalid_rdf_v0_4_0_duplicate_tensor_names) - - model_schema = Model() - with pytest.raises(ValidationError): - model_schema.load(data) - - # as 0.3 the model should still be valid with some small changes - model_schema = Model_v03() - data["format_version"] = "0.3.3" - data["language"] = "python" - data["framework"] = "pytorch" - data["source"] = data["weights"]["pytorch_state_dict"].pop("architecture") - data["kwargs"] = data["weights"]["pytorch_state_dict"].pop("kwargs") - data["sha256"] = data["weights"]["pytorch_state_dict"].pop("architecture_sha256") - - valid_data = model_schema.load(data) - assert valid_data - - -def test_output_fixed_shape_too_small(model_dict): - from bioimageio.spec.model.schema import Model - - model_dict["outputs"] = [ - { - "name": "output_1", - "description": "Output 1", - "data_type": "float32", - "axes": "xyc", - "shape": [128, 128, 3], - "halo": [32, 128, 0], - } - ] - - with pytest.raises(ValidationError) as e: - Model().load(model_dict) - - assert e.value.messages == { - "_schema": ["Minimal shape [128 128 3] of output output_1 is too small for halo [32, 128, 0]."] - } - - -def test_output_ref_shape_mismatch(model_dict): - from bioimageio.spec.model.schema import Model - - model_dict["outputs"] = [ - { - "name": "output_1", - "description": "Output 1", - "data_type": "float32", - "axes": "xyc", - "shape": {"reference_tensor": "input_1", "scale": [1, 2, 3, 4], "offset": [0, 0, 0, 0]}, - } - ] - - with pytest.raises(ValidationError) as e: - Model().load(model_dict) - - assert e.value.messages == { - "_schema": [ - "Referenced tensor input_1 with 3 dimensions does not match output tensor output_1 with 4 dimensions." 
-        ]
-    }
-
-
-def test_output_ref_shape_too_small(model_dict):
-    from bioimageio.spec.model.schema import Model
-
-    model_dict["outputs"] = [
-        {
-            "name": "output_1",
-            "description": "Output 1",
-            "data_type": "float32",
-            "axes": "xyc",
-            "shape": {"reference_tensor": "input_1", "scale": [1, 2, 3], "offset": [0, 0, 0]},
-            "halo": [256, 128, 0],
-        }
-    ]
-
-    with pytest.raises(ValidationError) as e:
-        Model().load(model_dict)
-
-    assert e.value.messages == {
-        "_schema": ["Minimal shape [128. 256. 9.] of output output_1 is too small for halo [256, 128, 0]."]
-    }
-
-
-@pytest.mark.skipif(SKIP_ZENODO, reason=SKIP_ZENODO_REASON)
-def test_model_has_parent_with_uri(model_dict):
-    from bioimageio.spec.model.schema import Model
-
-    model_dict["parent"] = dict(uri="https://doi.org/10.5281/zenodo.5744489")
-
-    valid_data = Model().load(model_dict)
-    assert isinstance(valid_data, raw_nodes_m04.Model)
-
-
-@pytest.mark.skipif(SKIP_ZENODO, reason=SKIP_ZENODO_REASON)
-def test_model_has_parent_with_id(model_dict):
-    from bioimageio.spec.model.schema import Model
-
-    model_dict["parent"] = dict(id="10.5281/zenodo.5764892")
-
-    valid_data = Model().load(model_dict)
-    assert isinstance(valid_data, raw_nodes_m04.Model)
-
-
-def test_model_with_expanded_output(model_dict):
-    from bioimageio.spec.model.schema import Model
-
-    model_dict["outputs"] = [
-        {
-            "name": "output_1",
-            "description": "Output 1",
-            "data_type": "float32",
-            "axes": "xyzc",
-            "shape": dict(
-                scale=[1, 1, None, 1],
-                offset=[0, 0, 7, 0],
-                reference_tensor="input_1",
-            ),
-        }
-    ]
-
-    model = Model().load(model_dict)
-    assert isinstance(model, raw_nodes_m04.Model)
-    out0_shape = model.outputs[0].shape
-    assert isinstance(out0_shape, raw_nodes_m04.ImplicitOutputShape)
-    assert out0_shape.scale == [1, 1, None, 1]
diff --git a/tests/test_schemas_input_output_tensors.py b/tests/test_schemas_input_output_tensors.py
deleted file mode 100644
index a2b493766..000000000
--- a/tests/test_schemas_input_output_tensors.py
+++ /dev/null
@@ -1,51 +0,0 @@
-import pytest
-from marshmallow import missing
-
-from bioimageio.spec.model import raw_nodes, schema
-
-
-def test_tensor_schema_preprocessing():
-    data = {
-        "name": "input_1",
-        "description": "Input 1",
-        "data_type": "float32",
-        "axes": "xyc",
-        "shape": [128, 128, 3],
-        "preprocessing": [
-            {
-                "name": "scale_range",
-                "kwargs": {"max_percentile": 99, "min_percentile": 5, "mode": "per_sample", "axes": "xy"},
-            }
-        ],
-    }
-    validated_data = schema.InputTensor().load(data)
-    assert isinstance(validated_data, raw_nodes.InputTensor)
-    assert validated_data.name == data["name"]
-    assert validated_data.description == data["description"]
-    assert validated_data.data_type == data["data_type"]
-    assert validated_data.axes == data["axes"]
-    assert validated_data.shape == data["shape"]
-
-    assert isinstance(validated_data.preprocessing, list)
-    assert len(validated_data.preprocessing) == 1
-    preprocessing = validated_data.preprocessing[0]
-    assert preprocessing.name == "scale_range"
-
-
-@pytest.mark.parametrize(
-    "data",
-    [
-        {"name": "input_1", "description": "Input 1", "data_type": "float32", "axes": "xyc", "shape": [128, 128, 3]},
-        {"name": "input_1", "description": "Input 1", "data_type": "float32", "axes": "xyc", "shape": [128, 128, 3]},
-    ],
-)
-def test_tensor_schema_no_preprocessing(data):
-    validated_data = schema.InputTensor().load(data)
-    assert validated_data.preprocessing is missing
-
-
-@pytest.mark.parametrize("schema_instance", [schema.InputTensor(), schema.OutputTensor()])
-def test_tensor_schema_optional_description(schema_instance):
-    data = {"name": "tensor_1", "data_type": "float32", "axes": "xyc", "shape": [128, 128, 3]}
-    validated_data = schema_instance.load(data)
-    assert validated_data.description is missing
diff --git a/tests/test_schemas_processing.py b/tests/test_schemas_processing.py
deleted file mode 100644
index 8f1430dae..000000000
--- a/tests/test_schemas_processing.py
+++ /dev/null
@@ -1,60 +0,0 @@
-from typing import Union
-
-import pytest
-from marshmallow import ValidationError
-from pytest import raises
-
-
-pre_and_post_processing = [
-    (True, "binarize", {"threshold": 0.5}),
-    (False, "binarize", {"mode": "fixed", "threshold": 0.5}),
-    (True, "clip", {"min": 0.2, "max": 0.5}),
-    (False, "clip", {"min": "min", "max": 0.5}),
-    (True, "scale_linear", {"gain": 2, "offset": 0.5, "axes": "xy"}),
-    (False, "scale_linear", {"gain": 2, "offset": 0.5, "axes": "b"}),
-    (True, "sigmoid", {}),
-    (False, "sigmoid", {"axes": "x"}),
-    (True, "zero_mean_unit_variance", {"mode": "fixed", "mean": 1, "std": 2, "axes": "xy"}),
-    (False, "zero_mean_unit_variance", {"mode": "unknown", "mean": 1, "std": 2, "axes": "xy"}),
-    (True, "scale_range", {"mode": "per_sample", "axes": "xy"}),
-    (False, "scale_range", {"mode": "fixed", "axes": "xy"}),
-    (True, "scale_range", {"mode": "per_sample", "axes": "xy", "min_percentile": 5, "max_percentile": 50}),
-    (False, "scale_range", {"mode": "per_sample", "axes": "xy", "min_percentile": 50, "max_percentile": 50}),
-    (False, "scale_range", {"mode": "per_sample", "axes": "xy", "min": 0}),
-]
-
-
-@pytest.fixture(
-    params=[
-        ("Preprocessing", valid, name, kwargs)
-        for valid, name, kwargs in pre_and_post_processing
-        + [(False, "scale_range", {"mode": "per_dataset", "axes": "xy", "reference_tensor": "some_input_tensor_name"})]
-    ]
-    + [
-        ("Postprocessing", valid, name, kwargs)
-        for valid, name, kwargs in pre_and_post_processing
-        + [
-            (True, "scale_range", {"mode": "per_sample", "axes": "xy"}),
-            (True, "scale_range", {"mode": "per_dataset", "axes": "xy", "reference_tensor": "some_input_tensor_name"}),
-            (True, "scale_mean_variance", {"mode": "per_sample", "reference_tensor": "some_tensor_name"}),
-            (False, "scale_mean_variance", {"mode": "per_sample"}),
-            (True, "scale_mean_variance", {"mode": "per_dataset", "reference_tensor": "some_tensor_name"}),
-            (False, "scale_mean_variance", {"mode": "per_dataset"}),
-        ]
-    ]
-)
-def processing_test_data(request):
-    return request.param
-
-
-def test_processing(processing_test_data):
-    proc_class_name, valid, name, kwargs = processing_test_data
-    from bioimageio.spec.model import schema
-
-    proc: Union[schema.Preprocessing, schema.Postprocessing] = getattr(schema, proc_class_name)()
-
-    if valid:
-        proc.load({"name": name, "kwargs": kwargs})
-    else:
-        with raises(ValidationError):
-            proc.load({"name": name, "kwargs": kwargs})
diff --git a/tests/test_shared/test_resolve_source.py b/tests/test_shared/test_resolve_source.py
deleted file mode 100644
index 05584f753..000000000
--- a/tests/test_shared/test_resolve_source.py
+++ /dev/null
@@ -1,32 +0,0 @@
-import os
-from pathlib import Path
-from typing import Optional
-
-import pytest
-
-from bioimageio.spec.shared.raw_nodes import URI
-
-
-def mock_download(uri: URI, output: Optional[os.PathLike] = None, pbar=None):
-    return Path(__file__).resolve()
-
-
-@pytest.mark.parametrize(
-    "src",
-    [
-        "https://example.com/fake",
-        Path(__file__),
-        __file__,
-        URI("https://example.com/fake"),
-    ],
-)
-def test_get_resolved_source_path(src):
-    from bioimageio.spec.shared import get_resolved_source_path
-
-    import bioimageio.spec
-
-    bioimageio.spec.shared._resolve_source._download_url = mock_download
-    res = get_resolved_source_path(src, root_path=Path(__file__).parent)
-    assert isinstance(res, Path)
-    assert res.exists()
-    assert res == Path(__file__).resolve()
diff --git a/tests/test_specific_reexports_generics.py b/tests/test_specific_reexports_generics.py
new file mode 100644
index 000000000..a160c9e88
--- /dev/null
+++ b/tests/test_specific_reexports_generics.py
@@ -0,0 +1,117 @@
+from types import ModuleType
+from typing import Any, Dict
+
+import pytest
+
+from bioimageio.spec import application, collection, dataset, generic, model, notebook
+
+IGNORE_MEMBERS = {
+    "AfterValidator",
+    "ALERT",
+    "Annotated",
+    "annotations",
+    "Any",
+    "as_warning",
+    "assert_never",
+    "BioimageioYamlContent",
+    "collections",
+    "convert_from_older_format",
+    "Converter",
+    "CoverImageSource",
+    "DeprecatedLicenseId",
+    "Dict",
+    "DocumentationSource",
+    "EmailStr",
+    "field_validator",
+    "Field",
+    "FileSource",
+    "Ge",
+    "get_args",
+    "ImportantFileSource",
+    "include_in_package_serializer",
+    "issue_warning",
+    "Len",
+    "LicenseId",
+    "LICENSES",
+    "List",
+    "LowerCase",
+    "Mapping",
+    "MarkdownSource",
+    "MaxLen",
+    "model_validator",
+    "Node",
+    "NotEmpty",
+    "Optional",
+    "partial",
+    "Predicate",
+    "requests",
+    "ResourceDescrBase",
+    "ResourceDescrType",
+    "Self",
+    "Sequence",
+    "settings",
+    "TAG_CATEGORIES",
+    "TypeVar",
+    "Union",
+    "V_suffix",
+    "v0_2",
+    "v0_3",
+    "v0_4",
+    "v0_5",
+    "validate_gh_user",
+    "validate_suffix",
+    "validation_context_var",
+    "ValidationInfo",
+    "warn",
+    "WithSuffix",
+    "YamlValue",
+}
+
+
+def get_members(m: ModuleType):
+    return {
+        k: getattr(m, k)
+        for k in dir(m)
+        if not k.startswith("_") and k not in IGNORE_MEMBERS
+    }
+
+
+GENERIC_ONLY_MEMBERS = {
+    "GenericDescr",
+    "GenericDescrBase",
+    "GenericModelDescrBase",
+    "KNOWN_SPECIFIC_RESOURCE_TYPES",
+    "VALID_COVER_IMAGE_EXTENSIONS",
+}
+
+GENERIC_v0_2_MEMBERS = {
+    k: v for k, v in get_members(generic.v0_2).items() if k not in GENERIC_ONLY_MEMBERS
+}
+GENERIC_v0_3_MEMBERS = {
+    k: v for k, v in get_members(generic.v0_3).items() if k not in GENERIC_ONLY_MEMBERS
+}
+
+
+@pytest.mark.parametrize(
+    "generic_members,specific",
+    [
+        (GENERIC_v0_2_MEMBERS, application.v0_2),
+        (GENERIC_v0_2_MEMBERS, collection.v0_2),
+        (GENERIC_v0_2_MEMBERS, dataset.v0_2),
+        (GENERIC_v0_2_MEMBERS, model.v0_4),
+        (GENERIC_v0_2_MEMBERS, notebook.v0_2),
+        (GENERIC_v0_3_MEMBERS, application.v0_3),
+        (GENERIC_v0_3_MEMBERS, collection.v0_3),
+        (GENERIC_v0_3_MEMBERS, dataset.v0_3),
+        (GENERIC_v0_3_MEMBERS, model.v0_5),
+        (GENERIC_v0_3_MEMBERS, notebook.v0_3),
+    ],
+)
+def test_specific_module_has_all_generic_symbols(
+    generic_members: Dict[str, Any], specific: ModuleType
+):
+    members = get_members(specific)
+    missing = {k for k in generic_members if k not in members}
+    assert not missing
+    unidentical = {k for k, v in generic_members.items() if v is not members[k]}
+    assert not unidentical
diff --git a/tests/test_sub_schemas.py b/tests/test_sub_schemas.py
deleted file mode 100644
index 70bd3be8c..000000000
--- a/tests/test_sub_schemas.py
+++ /dev/null
@@ -1,68 +0,0 @@
-import pytest
-
-from bioimageio.spec.shared import fields
-
-
-def test_cite_entry():
-    from bioimageio.spec.rdf.schema import CiteEntry
-
-    data = {
-        "text": "Title",
-        "doi": "https://doi.org/10.1109/5.771073",
-        "url": "https://ieeexplore.ieee.org/document/771073",
-    }
-
-    CiteEntry().load(data)
-
-
-def test_cite_field_option1():
-    """only way we allow to specify listed, nested schemas.
-
-    Limitation to allow better exception and warning messages and make the code in general more concise.
-    """
-    from bioimageio.spec.rdf.schema import CiteEntry
-
-    data = [
-        {
-            "text": "Title",
-            "doi": "https://doi.org/10.1109/5.771073",
-            "url": "https://ieeexplore.ieee.org/document/771073",
-        }
-    ] * 2
-
-    cite_field = fields.List(fields.Nested(CiteEntry()), required=True)
-    cite_field.deserialize(data)
-
-
-# we (arbitrarily) don't allow this. Test for reference only. see fields.Nested for details
-# def test_cite_field_option2():
-#     from bioimageio.spec.rdf.schema import CiteEntry
-#
-#     data = [
-#         {
-#             "text": "Title",
-#             "doi": "https://doi.org/10.1109/5.771073",
-#             "url": "https://ieeexplore.ieee.org/document/771073",
-#         }
-#     ] * 2
-#
-#     cite_field = fields.Nested(CiteEntry(many=True), required=True)
-#     out = cite_field.deserialize(data)
-#     assert len(out) == 2
-
-
-# we (arbitrarily) don't allow this. Test for reference only. see fields.Nested for details
-# def test_cite_field_option3():
-#     from bioimageio.spec.rdf.schema import CiteEntry
-#
-#     data = [
-#         {
-#             "text": "Title",
-#             "doi": "https://doi.org/10.1109/5.771073",
-#             "url": "https://ieeexplore.ieee.org/document/771073",
-#         }
-#     ] * 2
-#
-#     cite_field = fields.Nested(CiteEntry(many=True), many=True, required=True)
-#     out = cite_field.deserialize(data)
-#     assert len(out) == 2
diff --git a/tests/test_validation_errors.py b/tests/test_validation_errors.py
deleted file mode 100644
index 07be98f54..000000000
--- a/tests/test_validation_errors.py
+++ /dev/null
@@ -1,34 +0,0 @@
-"""check for meaningful validation errors for various invalid input"""
-from bioimageio.spec.shared import yaml
-
-
-def test_list_instead_of_nested_schema(unet2d_nuclei_broad_latest):
-    from bioimageio.spec.commands import validate
-
-    assert yaml is not None
-    data = yaml.load(unet2d_nuclei_broad_latest)
-
-    # set wrong run_mode (list)
-    data["run_mode"] = [{"name": "something"}]
-
-    error = validate(data)["error"]
-    assert isinstance(error, dict)
-    assert len(error) == 1
-    assert error["run_mode"] == ["Expected dictionary, but got list."]
-
-
-def test_forward_compatibility_error(unet2d_fixed_shape):
-    from bioimageio.spec.commands import validate
-
-    assert yaml is not None
-    data = yaml.load(unet2d_fixed_shape)
-
-    data["authors"] = 42  # make sure rdf is invalid
-    data["format_version"] = "9999.0.0"  # assume it is valid in a future format version
-
-    error = validate(data)["error"]
-
-    # even though the format version is correctly formatted, it should be mentioned here as we treat the future format
-    # version as the current latest. If this attempted forward compatibility fails we have to report that we did it.
-    assert isinstance(error, dict)
-    assert "format_version" in error
diff --git a/tests/test_weights_formats.py b/tests/test_weights_formats.py
deleted file mode 100644
index 93777d911..000000000
--- a/tests/test_weights_formats.py
+++ /dev/null
@@ -1,18 +0,0 @@
-from bioimageio.spec.shared.common import get_args
-
-
-def test_weights_formats_have_raw_nodes():
-    from bioimageio.spec.model import raw_nodes, schema
-
-    weights_formats = [wf for wf in get_args(raw_nodes.WeightsFormat)]
-    weights_entry_class_names = [wf.title().replace("_", "") + "WeightsEntry" for wf in weights_formats]
-
-    # all defined weights formats need their schema and nodes implemented...
-    for wecn in weights_entry_class_names:
-        assert hasattr(schema, wecn), wecn
-        assert hasattr(raw_nodes, wecn), wecn
-
-    # every WeightsEntry schema needs to validate its corresponding weights_format
-    for wf, wecn in zip(weights_formats, weights_entry_class_names):
-        comparable = getattr(schema, wecn)().fields["weights_format"].validate.comparable
-        assert comparable == wf, (comparable, wf)
diff --git a/tests/utils.py b/tests/utils.py
new file mode 100644
index 000000000..eaa78e4e7
--- /dev/null
+++ b/tests/utils.py
@@ -0,0 +1,255 @@
+from contextlib import nullcontext
+from copy import deepcopy
+from pathlib import Path
+from typing import (
+    Any,
+    ContextManager,
+    Dict,
+    Mapping,
+    Optional,
+    Protocol,
+    Sequence,
+    Set,
+    Type,
+    Union,
+)
+
+import jsonschema
+import pytest
+from deepdiff import DeepDiff
+from pydantic import (
+    DirectoryPath,
+    RootModel,
+    TypeAdapter,
+    ValidationError,
+    create_model,  # type: ignore
+)
+from ruyaml import YAML
+
+from bioimageio.spec._description import InvalidDescr, build_description
+from bioimageio.spec._internal.common_nodes import Node
+from bioimageio.spec._internal.io import download
+from bioimageio.spec._internal.root_url import RootHttpUrl
+from bioimageio.spec._internal.url import HttpUrl
+from bioimageio.spec._internal.validation_context import ValidationContext
+from bioimageio.spec.application.v0_2 import ApplicationDescr as ApplicationDescr02
+from bioimageio.spec.collection.v0_2 import CollectionDescr as CollectionDescr02
+from bioimageio.spec.dataset.v0_2 import DatasetDescr as DatasetDescr02
+from bioimageio.spec.generic._v0_2_converter import DOI_PREFIXES
+from bioimageio.spec.generic.v0_2 import GenericDescr as GenericDescr02
+from bioimageio.spec.model.v0_4 import ModelDescr as ModelDescr04
+from bioimageio.spec.notebook.v0_2 import NotebookDescr as NotebookDescr02
+
+yaml = YAML(typ="safe")
+
+
+unset = object()
+
+
+def check_node(
+    node_class: Type[Node],
+    kwargs: Union[Dict[str, Any], Node],
+    *,
+    context: Optional[ValidationContext] = None,
+    expected_dump_json: Any = unset,
+    expected_dump_python: Any = unset,
+    is_invalid: bool = False,
+):
+    if is_invalid:
+        assert expected_dump_json is unset
+        assert expected_dump_python is unset
+
+    error_context: ContextManager = pytest.raises(ValidationError) if is_invalid else nullcontext()  # type: ignore
+    with error_context:
+        node = node_class.model_validate(
+            kwargs,
+            context=context or ValidationContext(root=Path(__file__).parent),
+        )
+
+    if expected_dump_json is not unset:
+        actual = node.model_dump(mode="json")
+        assert actual == expected_dump_json, actual
+
+    if expected_dump_python is not unset:
+        actual = node.model_dump(mode="python")
+        assert actual == expected_dump_python, actual
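+
+
+# Usage sketch for `check_node` (illustration only, not executed; `AuthorNode`
+# is a hypothetical `Node` subclass, any node class works the same way):
+#
+#     check_node(AuthorNode, {"name": "Jane Doe"})           # expected to validate
+#     check_node(AuthorNode, {"name": 42}, is_invalid=True)  # expected to raise
+#
+# The `unset` sentinel above distinguishes "no expectation given" from an
+# expected value that happens to be falsy, e.g. `None` or `{}`.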
+
+
+class DummyNodeBase(Node):
+    value: Any
+
+
+def check_type(
+    type_: Union[Any, Type[Any]],
+    value: Any,
+    expected: Any = unset,
+    expected_root: Any = unset,
+    expected_deserialized: Any = unset,
+    *,
+    is_invalid: bool = False,
+):
+    type_adapter = TypeAdapter(type_)
+    error_context: ContextManager = pytest.raises(ValidationError) if is_invalid else nullcontext()  # type: ignore
+
+    with error_context:
+        actual = type_adapter.validate_python(value)
+
+    if expected is not unset:
+        assert actual == expected, (actual, expected)
+
+    if expected_root is not unset:
+        assert isinstance(actual, RootModel)
+        assert actual.root == expected_root, (actual.root, expected_root)
+
+    if expected_deserialized is not unset:
+        actual_deserialized = type_adapter.dump_python(
+            actual, mode="json", exclude_unset=True
+        )
+        assert actual_deserialized == expected_deserialized, (
+            actual_deserialized,
+            expected_deserialized,
+        )
+
+    node = create_model("DummyNode", value=(type_, ...), __base__=DummyNodeBase)
+    with error_context:
+        actual_node = node.model_validate(dict(value=value))
+
+    if expected is not unset:
+        assert actual_node.value == expected, (actual_node.value, expected)
+
+    if expected_root is not unset:
+        assert isinstance(actual_node.value, RootModel)
+        assert actual_node.value.root == expected_root, (
+            actual_node.value.root,
+            expected_root,
+        )
+
+    if expected_deserialized is not unset:
+        node_deserialized = actual_node.model_dump(mode="json", exclude_unset=True)
+        assert node_deserialized["value"] == expected_deserialized, (
+            node_deserialized["value"],
+            expected_deserialized,
+        )
+
+
+def check_bioimageio_yaml(
+    source: Union[Path, HttpUrl],
+    /,
+    *,
+    root: Union[RootHttpUrl, DirectoryPath] = Path(),
+    as_latest: bool,
+    exclude_fields_from_roundtrip: Set[str] = set(),
+    is_invalid: bool = False,
+    bioimageio_json_schema: Optional[Mapping[Any, Any]],
+    perform_io_checks: bool = True,
+) -> None:
+    downloaded_source = download(source)
+    root = downloaded_source.original_root
+    with downloaded_source.path.open(encoding="utf-8") as f:
+        data: Union[Any, Dict[Any, Any]] = yaml.load(f)
+
+    assert isinstance(data, dict), type(data)
+    format_version = "latest" if as_latest else "discover"
+    with ValidationContext(
+        root=root,
+        file_name=downloaded_source.original_file_name,
+        perform_io_checks=perform_io_checks,
+    ):
+        rd = build_description(deepcopy(data), format_version=format_version)
+    assert not is_invalid or (
+        is_invalid and isinstance(rd, InvalidDescr)
+    ), "Invalid RDF passed validation"
+
+    summary = rd.validation_summary
+    assert summary is not None
+    if is_invalid:
+        assert summary.status == "failed", "passes despite marked as known failure case"
+        assert isinstance(rd, InvalidDescr)
+        return
+
+    assert summary.status == "passed", summary.format()
+    assert rd is not None
+
+    json_data = rd.model_dump(mode="json")
+    # check compatibility with our latest json schema...
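+    # (historic format examples, recognizable by "v0_" in the file name, are
+    #  skipped below, presumably because the generated JSON schema only covers
+    #  the current format version)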
+    if (
+        bioimageio_json_schema is not None
+        and "v0_"
+        not in downloaded_source.path.name  # ...unless it's a historic example
+    ):
+        try:
+            jsonschema.validate(json_data, bioimageio_json_schema)
+        except jsonschema.ValidationError:
+            # TODO: improve error message/log
+            raise ValueError(
+                f"jsonschema validation error for {downloaded_source.path}"
+            )
+
+    if as_latest:
+        return
+
+    # check roundtrip
+    exclude_from_comp = {
+        "format_version",
+        "timestamp",
+        *exclude_fields_from_roundtrip,
+    }
+    if isinstance(
+        rd,
+        (
+            ModelDescr04,
+            ApplicationDescr02,
+            CollectionDescr02,
+            DatasetDescr02,
+            GenericDescr02,
+            NotebookDescr02,
+        ),
+    ):
+        # these fields may intentionally be manipulated
+        exclude_from_comp |= {"version_number", "id_emoji", "id"}
+
+    deserialized = rd.model_dump(
+        mode="json", exclude=exclude_from_comp, exclude_unset=True
+    )
+    expect_back = {k: v for k, v in data.items() if k not in exclude_from_comp}
+    assert_rdf_dict_equal(
+        deserialized, expect_back, f"roundtrip {source}\n", ignore_known_rdf_diffs=True
+    )
+
+
+def assert_rdf_dict_equal(
+    actual: Dict[Any, Any],
+    expected: Dict[Any, Any],
+    msg: str = "",
+    *,
+    ignore_known_rdf_diffs: bool = False,
+):
+    diff: Any = DeepDiff(expected, actual)
+    if ignore_known_rdf_diffs:
+        slim_diff = deepcopy(diff)
+        VC = "values_changed"
+        k: Any
+        for k in diff.get(VC, {}):
+            if (
+                isinstance(k, str)
+                and k.startswith("root['cite'][")
+                and k.endswith("]['doi']")
+                and any(diff[VC][k]["old_value"].startswith(dp) for dp in DOI_PREFIXES)
+            ):
+                # 1. we drop 'https://doi.org/' from the cite[i].doi field
+                slim_diff[VC].pop(k)
+
+        if VC in slim_diff and not slim_diff[VC]:
+            slim_diff.pop(VC)
+
+        diff = slim_diff
+
+    assert not diff, msg + diff.pretty()
+
+
+class ParameterSet(Protocol):
+    def __init__(self, values: Sequence[Any], marks: Any, id: str) -> None:
+        super().__init__()
+
+
+WARNING_LEVEL_CONTEXT_KEY = "warning_level"
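+
+
+# Usage sketch for `check_type` (illustration only, not executed):
+#
+#     check_type(int, "3", expected=3)           # pydantic coerces "3" to 3
+#     check_type(int, "three", is_invalid=True)  # raises ValidationError
+#
+# `check_type` checks `value` twice: once directly via a `TypeAdapter` and once
+# wrapped as a field of a dynamically created dummy `Node`, so that standalone
+# and field-level validation behavior are both exercised.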