Skip to content

Commit

Permalink
Merge pull request #4 from mlexchange/tibbers-dev
Browse files Browse the repository at this point in the history
Tibbers-Dev
  • Loading branch information
Wiebke authored Feb 23, 2024
2 parents 1cc40eb + e6b68bb commit 4a7e2a0
Show file tree
Hide file tree
Showing 17 changed files with 3,088 additions and 821 deletions.
45 changes: 45 additions & 0 deletions .github/workflows/publish-image.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
# GitHub Actions workflow: build the repository's Docker image and publish
# it to the GitHub Container Registry (ghcr.io).
name: Create and publish image

# Run on pushes to main and on version tags (v*).
on:
  push:
    branches: ['main']
    tags: ['v*']

env:
  REGISTRY: ghcr.io
  # ghcr.io images are namespaced by repository (owner/repo).
  IMAGE_NAME: ${{ github.repository }}

jobs:
  build-and-push-image:
    runs-on: ubuntu-latest
    # Minimal token scopes: read the repo contents, write the package (image).
    permissions:
      contents: read
      packages: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
        with:
          # Full history so git-based metadata (tags) is available below.
          fetch-depth: 0

      - name: Log in to the Container registry
        uses: docker/login-action@v2
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          # The workflow's own token is sufficient for ghcr.io pushes.
          password: ${{ secrets.GITHUB_TOKEN }}

      # Derive image tags/labels (branch, tag, sha) for the push step.
      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v4
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}

      - name: Build and push Docker image
        uses: docker/build-push-action@v4
        with:
          context: .
          file: Dockerfile
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
10 changes: 10 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,3 +1,13 @@
# Local training/experiment outputs
notebooks/models
results/
logs.txt

# macOS Finder metadata
.DS_Store

# dvc related for testing
.dvc*
dvc*

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
Expand Down
10 changes: 10 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
# Runtime image for the DLSIA segmentation prototype.
FROM python:3.9

# Copy only the requirements first so the (slow) dependency layer is
# cached and only rebuilt when requirements.txt changes.
COPY requirements.txt requirements.txt

# One RUN keeps the image to a single dependency layer; --no-cache-dir
# keeps pip's download cache out of the image.
RUN pip install --no-cache-dir --upgrade pip && \
    pip install --no-cache-dir -r requirements.txt

# Application code lives under /app/work/src (see Makefile invocations).
WORKDIR /app/work/
COPY src/ src/

# Default to an interactive shell; jobs are launched via Makefile targets.
CMD ["bash"]
170 changes: 170 additions & 0 deletions Makefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,170 @@
# Makefile for testing main.py

# Load environment variables (Tiled URIs and API keys) from the .env file
# and export their names so the python processes in the recipes see them.
include .env
export $(shell sed 's/=.*//' .env)

# Define variables

# Simple (:=) assignment: constants are expanded once at parse time rather
# than on every reference.
MASK_IDX := "[10, 201, 222, 493]"
SHIFT := 2
SAVE_PATH := results/models
UID := uid0001

# JSON hyperparameter blob for MSDNet with automatic dilation
# (custom_dilation false, dilations chosen up to max_dilation).
MSDNET_PARAMETERS_1 = '{"network": "MSDNet", \
"num_classes": 3, \
"num_epochs": 3, \
"optimizer": "Adam", \
"criterion": "CrossEntropyLoss", \
"learning_rate": 0.1, \
"activation": "ReLU", \
"normalization": "BatchNorm2d", \
"convolution": "Conv2d", \
"msdnet_parameters": { \
"layer_width": 1, \
"num_layers": 3, \
"custom_dilation": false, \
"max_dilation": 5 \
}, \
"dataloaders": { \
"shuffle_train": true, \
"batch_size_train": 1, \
"shuffle_val": true, \
"batch_size_val": 1, \
"shuffle_inference": false, \
"batch_size_inference": 1, \
"val_pct": 0.2 \
} \
}'

# JSON hyperparameter blob for MSDNet with an explicit dilation array
# (custom_dilation true).
MSDNET_PARAMETERS_2 = '{"network": "MSDNet", \
"num_classes": 3, \
"num_epochs": 3, \
"optimizer": "Adam", \
"criterion": "CrossEntropyLoss", \
"learning_rate": 0.1, \
"activation": "ReLU", \
"normalization": "BatchNorm2d", \
"convolution": "Conv2d", \
"msdnet_parameters": { \
"layer_width": 1, \
"num_layers": 3, \
"custom_dilation": true, \
"dilation_array": [1,2,4] \
}, \
"dataloaders": { \
"shuffle_train": true, \
"batch_size_train": 1, \
"shuffle_val": true, \
"batch_size_val": 1, \
"shuffle_inference": false, \
"batch_size_inference": 1, \
"val_pct": 0.2 \
} \
}'

# JSON hyperparameter blob for the TUNet architecture.
TUNET_PARAMETERS = '{"network": "TUNet", \
"num_classes": 3, \
"num_epochs": 3, \
"optimizer": "Adam", \
"criterion": "CrossEntropyLoss", \
"learning_rate": 0.1, \
"activation": "ReLU", \
"normalization": "BatchNorm2d", \
"convolution": "Conv2d", \
"tunet_parameters": { \
"depth": 4, \
"base_channels": 8, \
"growth_rate": 2, \
"hidden_rate": 1 \
}, \
"dataloaders": { \
"shuffle_train": true, \
"batch_size_train": 1, \
"shuffle_val": true, \
"batch_size_val": 1, \
"shuffle_inference": false, \
"batch_size_inference": 1, \
"val_pct": 0.2 \
} \
}'

# JSON hyperparameter blob for the TUNet3+ architecture (adds
# carryover_channels on top of the TUNet parameters).
TUNET3PLUS_PARAMETERS = '{"network": "TUNet3+", \
"num_classes": 3, \
"num_epochs": 3, \
"optimizer": "Adam", \
"criterion": "CrossEntropyLoss", \
"learning_rate": 0.1, \
"activation": "ReLU", \
"normalization": "BatchNorm2d", \
"convolution": "Conv2d", \
"tunet3plus_parameters": { \
"depth": 4, \
"base_channels": 8, \
"growth_rate": 2, \
"hidden_rate": 1, \
"carryover_channels": 8 \
}, \
"dataloaders": { \
"shuffle_train": true, \
"batch_size_train": 1, \
"shuffle_val": true, \
"batch_size_val": 1, \
"shuffle_inference": false, \
"batch_size_inference": 1, \
"val_pct": 0.2 \
} \
}'

# All targets below are commands, not files: declare them phony so make
# always runs them, even if a file with the same name ever exists.
.PHONY: train_msdnet_maxdil train_msdnet_customdil train_tunet train_tunet3plus \
        segment_msdnet_maxdil segment_msdnet_customdil segment_tunet segment_tunet3plus

# =================Training Commands==================================== #
# Each target runs src/train.py with positional arguments in this order:
# data URI, mask URI, data API key, mask API key, mask indices, shift,
# model save path, job UID, and the JSON hyperparameter blob for the
# chosen network. The URIs/keys come from .env (included above).

# Train MSDNet with automatically chosen (max) dilation.
train_msdnet_maxdil:
	python src/train.py $(DATA_TILED_URI) $(MASK_TILED_URI) \
	$(DATA_TILED_API_KEY) $(MASK_TILED_API_KEY) \
	$(MASK_IDX) $(SHIFT) $(SAVE_PATH) $(UID) \
	$(MSDNET_PARAMETERS_1)

# Train MSDNet with an explicit custom dilation array.
train_msdnet_customdil:
	python src/train.py $(DATA_TILED_URI) $(MASK_TILED_URI) \
	$(DATA_TILED_API_KEY) $(MASK_TILED_API_KEY) \
	$(MASK_IDX) $(SHIFT) $(SAVE_PATH) $(UID) \
	$(MSDNET_PARAMETERS_2)

# Train TUNet.
train_tunet:
	python src/train.py $(DATA_TILED_URI) $(MASK_TILED_URI) \
	$(DATA_TILED_API_KEY) $(MASK_TILED_API_KEY) \
	$(MASK_IDX) $(SHIFT) $(SAVE_PATH) $(UID) \
	$(TUNET_PARAMETERS)

# Train TUNet3+.
train_tunet3plus:
	python src/train.py $(DATA_TILED_URI) $(MASK_TILED_URI) \
	$(DATA_TILED_API_KEY) $(MASK_TILED_API_KEY) \
	$(MASK_IDX) $(SHIFT) $(SAVE_PATH) $(UID) \
	$(TUNET3PLUS_PARAMETERS)

# =================Inference Commands==================================== #
# Each target runs src/segment.py with positional arguments in this order:
# data URI, mask URI, segmentation-output URI, their three API keys, mask
# indices, model save path, job UID, and the matching JSON hyperparameter
# blob (must pair with the model trained by the corresponding train_* target).

# Segment with the max-dilation MSDNet model.
segment_msdnet_maxdil:
	python src/segment.py $(DATA_TILED_URI) $(MASK_TILED_URI) $(SEG_TILED_URI) \
	$(DATA_TILED_API_KEY) $(MASK_TILED_API_KEY) $(SEG_TILED_API_KEY) \
	$(MASK_IDX) $(SAVE_PATH) $(UID) \
	$(MSDNET_PARAMETERS_1)

# Segment with the custom-dilation MSDNet model.
segment_msdnet_customdil:
	python src/segment.py $(DATA_TILED_URI) $(MASK_TILED_URI) $(SEG_TILED_URI) \
	$(DATA_TILED_API_KEY) $(MASK_TILED_API_KEY) $(SEG_TILED_API_KEY) \
	$(MASK_IDX) $(SAVE_PATH) $(UID) \
	$(MSDNET_PARAMETERS_2)

# Segment with the TUNet model.
segment_tunet:
	python src/segment.py $(DATA_TILED_URI) $(MASK_TILED_URI) $(SEG_TILED_URI) \
	$(DATA_TILED_API_KEY) $(MASK_TILED_API_KEY) $(SEG_TILED_API_KEY) \
	$(MASK_IDX) $(SAVE_PATH) $(UID) \
	$(TUNET_PARAMETERS)

# Segment with the TUNet3+ model.
segment_tunet3plus:
	python src/segment.py $(DATA_TILED_URI) $(MASK_TILED_URI) $(SEG_TILED_URI) \
	$(DATA_TILED_API_KEY) $(MASK_TILED_API_KEY) $(SEG_TILED_API_KEY) \
	$(MASK_IDX) $(SAVE_PATH) $(UID) \
	$(TUNET3PLUS_PARAMETERS)
59 changes: 58 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
@@ -1 +1,58 @@
# mlex_dlsia_segmentation_prototype

This pipeline is built using the DLSIA package to run segmentation tasks for the High-Res Segmentation Application.

The primary goal is to make this compatible with the updated segmentation application in preparation for the upcoming Diamond beamtime.

## Feature Highlights

- Reading data and mask input directly from a Tiled server
- Saving segmentation results back to Tiled Server
- Different Neural Network choices (MSDNet, TUNet, TUNet3+)

## To Test

### Set Up Local Tiled Server

This step is recommended in order to keep the public Tiled server clean when writing tests, as there is currently no good way of deleting containers other than modifying the database directly.

### Request Public Tiled URI for Data and Masks

Please reach out to MLExchange Team.

### Installation

1. Git Clone the repository.

2. Navigate to the repository, then activate a new conda environment (recommended).

3. Install packages.

```
pip install -r requirements.txt
```

4. Set environment variables via a `.env` file to configure a connection to the Tiled server.

```
DATA_TILED_URI = https://tiled-seg.als.lbl.gov/api/v1/metadata/reconstruction/rec20190524_085542_clay_testZMQ_8bit/20190524_085542_clay_testZMQ_
DATA_TILED_API_KEY = <key-provided-on-request>
MASK_TILED_URI = https://tiled-seg.als.lbl.gov/api/v1/metadata/reconstruction/seg-partial-rec20190524_085542_clay_testZMQ_8bit/seg-partial-20190524_085542_clay_testZMQ_
MASK_TILED_API_KEY = <key-provided-on-request>
SEG_TILED_URI = <Local Tiled Server URI> (for example: http://0.0.0.0:8888)
SEG_TILED_API_KEY = <Local Tiled API Key>
```

5. Open a terminal and use the pre-built commands from the Makefile for testing, for example:

```
make train_tunet
```

# Copyright
MLExchange Copyright (c) 2023, The Regents of the University of California, through Lawrence Berkeley National Laboratory (subject to receipt of any required approvals from the U.S. Dept. of Energy). All rights reserved.

If you have questions about your rights to use or distribute this software, please contact Berkeley Lab's Intellectual Property Office at [email protected].

NOTICE. This Software was developed under funding from the U.S. Department of Energy and the U.S. Government consequently retains certain rights. As such, the U.S. Government has been granted for itself and others acting on its behalf a paid-up, nonexclusive, irrevocable, worldwide license in the Software to reproduce, distribute copies to the public, prepare derivative works, and perform publicly and display publicly, and to permit others to do so.
Loading

0 comments on commit 4a7e2a0

Please sign in to comment.