Skip to content

Commit

Permalink
Adding new/updated DL4MicEverywhere_fnet-3d-zerocostdl4mic_1.13.1
Browse files Browse the repository at this point in the history
  • Loading branch information
IvanHCenalmor committed Oct 15, 2024
1 parent 04c695e commit 488a0f8
Show file tree
Hide file tree
Showing 4 changed files with 204 additions and 0 deletions.
Binary file modified album_catalog_index.db
Binary file not shown.
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
# Changelog
All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [1.13.1] - 2024-10-15
../CHANGELOG.md
154 changes: 154 additions & 0 deletions solutions/DL4MicEverywhere/fnet-3d-zerocostdl4mic/solution.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,154 @@
###album catalog: cellcanvas

# Based on https://github.com/HenriquesLab/DL4MicEverywhere/blob/main/notebooks/ZeroCostDL4Mic_notebooks/fnet_3D_DL4Mic/configuration.yaml
# and https://github.com/betaseg/solutions/blob/main/solutions/io.github.betaseg/cellsketch-plot/solution.py

from album.runner.api import setup
import subprocess

# Module-level GPU probe: `nvidia-smi` exits successfully only when an
# NVIDIA driver (and GPU) is reachable; any failure — missing binary,
# driver error, non-zero exit — is treated as "no GPU access".
try:
    subprocess.check_output('nvidia-smi')
except Exception:
    gpu_access = False
else:
    gpu_access = True

def install():
    """Install hook: fetch the fnet 3D ZeroCostDL4Mic notebook and convert
    it to its local ("colabless") form with DL4MicEverywhere's tooling.

    Side effects: clones the DL4MicEverywhere repository into the app path,
    downloads the notebook, rewrites it in place, then removes the clone.
    Raises on clone/download/conversion failure.
    """
    from album.runner.api import get_app_path
    from git import Repo
    import subprocess
    import requests
    import shutil
    import os

    # Clone the DL4MicEverywhere repository (provides the conversion tool
    # under .tools/notebook_autoconversion/).
    clone_url = "https://github.com/HenriquesLab/DL4MicEverywhere"
    repo_path = get_app_path().joinpath("DL4MicEverywhere")
    Repo.clone_from(clone_url, repo_path)
    assert repo_path.exists(), "Cloning DL4MicEverywhere failed"

    # URL of the notebook to download and convert.
    notebook_url = "https://raw.githubusercontent.com/HenriquesLab/ZeroCostDL4Mic/master/Colab_notebooks/fnet_3D_ZeroCostDL4Mic.ipynb"

    notebook_path = get_app_path().joinpath("fnet_3D_ZeroCostDL4Mic.ipynb")
    notebook_path.parent.mkdir(parents=True, exist_ok=True)

    response = requests.get(notebook_url)
    response.raise_for_status()

    with open(notebook_path, 'wb') as notebook_file:
        notebook_file.write(response.content)

    assert notebook_path.exists(), "Notebook download failed"

    # Colab-only sections to strip from the notebook.
    section_to_remove = "1.1. 1.2. 2. 6.3.".split(' ')

    python_command = [
        "python", ".tools/notebook_autoconversion/transform.py",
        "-p", f"{get_app_path()}",
        "-n", "fnet_3D_ZeroCostDL4Mic.ipynb",
        "-s",
    ]
    python_command += section_to_remove

    # BUG FIX: the original passed cwd=to / operated on `to`, a name that
    # was never defined (NameError). The transform script lives inside the
    # cloned repository, so run it from repo_path; check=True surfaces
    # conversion failures instead of silently continuing.
    subprocess.run(python_command, cwd=repo_path, check=True)

    # Replace the original notebook with the converted one. shutil.move is
    # portable, unlike shelling out to `mv` (which does not exist on Windows).
    shutil.move(
        str(get_app_path().joinpath("colabless_fnet_3D_ZeroCostDL4Mic.ipynb")),
        str(get_app_path().joinpath("fnet_3D_ZeroCostDL4Mic.ipynb")),
    )

    # Remove the cloned DL4MicEverywhere repository.
    if os.name == 'nt':
        # rmtree often lacks permission on git's read-only object files on
        # Windows, so fall back to the shell there.
        os.system(f'rmdir /s /q "{repo_path}"')
    else:
        shutil.rmtree(repo_path)

def run():
    """Run hook: open the converted notebook in JupyterLab.

    Uses the `path` solution argument as the working directory (created if
    missing). When a GPU was detected at import time, extends
    LD_LIBRARY_PATH so TensorFlow can locate the conda-provided CUDA libs.
    """
    from album.runner.api import get_args, get_app_path
    import subprocess
    import os

    # Fetch arguments and paths.
    args = get_args()
    app_path = get_app_path()

    # Notebook placed here by the install hook.
    notebook_path = app_path.joinpath("fnet_3D_ZeroCostDL4Mic.ipynb")
    assert notebook_path.exists(), "Notebook does not exist"

    # Working directory for the notebook session.
    output_path = args.path
    os.makedirs(output_path, exist_ok=True)
    print(f"Saving output to {output_path}")

    # Let TensorFlow find the CUDA libraries shipped in the conda env.
    if gpu_access:
        # BUG FIX: the original indexed os.environ['LD_LIBRARY_PATH']
        # directly, raising KeyError whenever the variable is unset.
        conda_lib = os.path.join(os.environ.get("CONDA_PREFIX", ""), "lib")
        current = os.environ.get("LD_LIBRARY_PATH", "")
        os.environ["LD_LIBRARY_PATH"] = (
            f"{current}:{conda_lib}" if current else conda_lib
        )

    # Launch JupyterLab on the notebook, working inside output_path.
    subprocess.run(["jupyter", "lab", str(notebook_path)], cwd=str(output_path))

# Conda environment specification, selected at import time from the GPU
# probe above: with a GPU we add the nvidia/anaconda channels and pin
# CUDA 11.8 / cuDNN 8.6; otherwise a minimal CPU-only stack.
# NOTE(review): these YAML fragments are indentation-sensitive once
# interpolated into env_file below — verify the rendered document parses.
if gpu_access:
    channels = """
  - conda-forge
  - nvidia
  - anaconda
  - defaults
"""
    dependencies = """
  - python=3.7
  - cudatoolkit=11.8.0
  - cudnn=8.6.0
  - pip
  - pkg-config
"""
else:
    channels = """
  - conda-forge
  - defaults
"""
    # NOTE(review): the f-prefix here is unnecessary (no placeholders).
    dependencies = f"""
  - python=3.7
  - pip
  - pkg-config
"""

# Full conda environment file handed to album via setup()'s
# `dependencies` key; pip requirements are shared by both variants.
env_file = f"""
channels:
{channels}
dependencies:
{dependencies}
  - pip:
    - GitPython==3.1.43
    - matplotlib==2.2.3
    - numpy==1.18.0
    - pandas>=0.21.1
    - tifffile==2019.7.26
    - tqdm==4.19.5
    - scikit-image==0.18.0
    - argschema
    - scipy==1.4.1
    - torch==1.4.0
    - astropy==3.2.3
    - fpdf2==2.7.4
"""

# Register the solution with the album framework: catalog metadata, CLI
# arguments, citations, the install/run hooks defined above, and the
# inline conda environment.
setup(
    group="DL4MicEverywhere",
    name="fnet-3d-zerocostdl4mic",
    version="1.13.1",
    solution_creators=["DL4Mic team", "album team"],
    title="fnet-3d-zerocostdl4mic implementation.",
    description="Paired image-to-image translation of 3D images. Label-free Prediction (fnet) is a neural network used to infer the features of cellular structures from brightfield or EM images without coloured labels. The network is trained using paired training images from the same field of view, imaged in a label-free (e.g. brightfield) and labelled condition (e.g. fluorescent protein). When trained, this allows the user to identify certain structures from brightfield images alone. The performance of fnet may depend significantly on the structure at hand. Note - visit the ZeroCostDL4Mic wiki to check the original publications this network is based on and make sure you cite these.",
    documentation="https://raw.githubusercontent.com/HenriquesLab/ZeroCostDL4Mic/master/BioimageModelZoo/README.md",
    tags=['colab', 'notebook', 'fnet', 'labelling', 'ZeroCostDL4Mic', '3D', 'dl4miceverywhere'],
    # Single user-facing argument: the working directory used by run().
    args=[{
        "name": "path",
        "type": "string",
        "default": ".",
        "description": "What is your working path?"
    }],
    cite=[{'doi': 'https://doi.org/10.1038/s41467-021-22518-0', 'text': 'von Chamier, L., Laine, R.F., Jukkala, J. et al. Democratising deep learning for microscopy with ZeroCostDL4Mic. Nat Commun 12, 2276 (2021). https://doi.org/10.1038/s41467-021-22518-0'}, {'doi': 'https://doi.org/10.1038/s41592-018-0111-2', 'text': 'Ounkomol, C., Seshamani, S., Maleckar, M.M. et al. Label-free prediction of three-dimensional fluorescence images from transmitted-light microscopy. Nat Methods 15, 917–920 (2018). https://doi.org/10.1038/s41592-018-0111-2'}],
    album_api_version="0.5.1",
    covers=[],
    run=run,
    install=install,
    # Conda environment rendered above (GPU- or CPU-flavoured).
    dependencies={"environment_file": env_file},
)
42 changes: 42 additions & 0 deletions solutions/DL4MicEverywhere/fnet-3d-zerocostdl4mic/solution.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
album_api_version: 0.5.1
args:
- default: .
description: What is your working path?
name: path
type: string
changelog: ../CHANGELOG.md
cite:
- doi: https://doi.org/10.1038/s41467-021-22518-0
text: von Chamier, L., Laine, R.F., Jukkala, J. et al. Democratising deep learning
for microscopy with ZeroCostDL4Mic. Nat Commun 12, 2276 (2021). https://doi.org/10.1038/s41467-021-22518-0
- doi: https://doi.org/10.1038/s41592-018-0111-2
text: "Ounkomol, C., Seshamani, S., Maleckar, M.M. et al. Label-free prediction\
\ of three-dimensional fluorescence images from transmitted-light microscopy.\
\ Nat Methods 15, 917\u2013920 (2018). https://doi.org/10.1038/s41592-018-0111-2"
covers: []
description: Paired image-to-image translation of 3D images. Label-free Prediction
(fnet) is a neural network used to infer the features of cellular structures from
brightfield or EM images without coloured labels. The network is trained using paired
training images from the same field of view, imaged in a label-free (e.g. brightfield)
and labelled condition (e.g. fluorescent protein). When trained, this allows the
user to identify certain structures from brightfield images alone. The performance
of fnet may depend significantly on the structure at hand. Note - visit the ZeroCostDL4Mic
wiki to check the original publications this network is based on and make sure you
cite these.
documentation: https://raw.githubusercontent.com/HenriquesLab/ZeroCostDL4Mic/master/BioimageModelZoo/README.md
group: DL4MicEverywhere
name: fnet-3d-zerocostdl4mic
solution_creators:
- DL4Mic team
- album team
tags:
- colab
- notebook
- fnet
- labelling
- ZeroCostDL4Mic
- 3D
- dl4miceverywhere
timestamp: '2024-10-15T17:52:01.297896'
title: fnet-3d-zerocostdl4mic implementation.
version: 1.13.1

0 comments on commit 488a0f8

Please sign in to comment.