More work.
tsalo committed Dec 2, 2024
1 parent 695136f commit 2da1a28
Showing 2 changed files with 51 additions and 15 deletions.
42 changes: 42 additions & 0 deletions src/smripost_linc/utils/parcellation.py
@@ -0,0 +1,42 @@
"""Utility functions for parcellation."""


def gifti_to_annot(gifti, atlas, hemi, labels_file):
"""Create .annot files from a nifti file and a json file."""
import os

import nibabel as nb
import numpy as np
import pandas as pd

labels_df = pd.read_table(labels_file)
atlas_labels = labels_df['label'].tolist()

gifti_img = nb.load(gifti)
colors = _create_colors(len(atlas_labels))

annot = os.path.abspath(f'{hemi}.{atlas}.annot')
nb.freesurfer.write_annot(
annot,
labels=gifti_img.agg_data().astype(np.int32),
ctab=colors,
names=atlas_labels,
fill_ctab=True,
)

return annot


def _create_colors(n_colors):
"""Create RGBT-format colors for annotation files."""
import numpy as np

color_set = {(0, 0, 0, 0)}
while len(color_set) < n_colors:
new_color = tuple((np.random.rand(3) * 155).astype(np.int32)) + (0,)
color_set.add(new_color)
color_mat = np.array(sorted(color_set))
if color_mat.shape[0] != n_colors:
raise ValueError(f'Could not generate {n_colors} unique colors.')

return color_mat
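
As a quick sanity check outside of a Nipype workflow, the new helper can be called directly. A minimal sketch, assuming a hypothetical left-hemisphere label GIFTI and a labels TSV with a 'label' column (the file names are illustrative only); the .annot file is written to the current working directory:

from smripost_linc.utils.parcellation import gifti_to_annot

# Hypothetical inputs: one label GIFTI per hemisphere plus a TSV with a
# 'label' column listing the parcel names.
annot_file = gifti_to_annot(
    gifti='lh.Schaefer100.label.gii',
    atlas='Schaefer100',
    hemi='lh',
    labels_file='atlas-Schaefer100_dseg.tsv',
)
print(annot_file)  # absolute path to lh.Schaefer100.annot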
24 changes: 9 additions & 15 deletions src/smripost_linc/workflows/parcellation.py
@@ -19,20 +19,6 @@ def remove_non_alphabetic(input_string):
return clean_string


def _create_colors(n_colors):
import numpy as np

color_set = {(0, 0, 0, 0)}
while len(color_set) < n_colors:
new_color = tuple((np.random.rand(3) * 155).astype(np.int32)) + (0,)
color_set.add(new_color)
color_mat = np.array(sorted(color_set))
if color_mat.shape[0] != n_colors:
raise ValueError(f'Could not generate {n_colors} unique colors.')

return color_mat


def fake_neuroparc_from_nifti(nifti_file):
"""Create a fake neuroparc JSON from a nifti file."""
import nibabel as nb
@@ -44,7 +30,6 @@ def fake_neuroparc_from_nifti(nifti_file):


def fill_missing_parc(spec):

maxval = max(map(int, spec.keys()))
for key in range(maxval):
strkey = str(key)
@@ -166,6 +151,7 @@ def init_load_atlases_wf(name='load_atlases_wf'):
from smripost_linc.interfaces.bids import DerivativesDataSink
from smripost_linc.utils.bids import collect_atlases
from smripost_linc.utils.boilerplate import describe_atlases
from smripost_linc.utils.parcellation import gifti_to_annot

workflow = Workflow(name=name)
output_dir = config.execution.output_dir
@@ -316,6 +302,14 @@ def init_load_atlases_wf(name='load_atlases_wf'):
(inputnode, annot_node, [(('atlas_file', selecter), f'in{i_atlas + 1}')]),
]) # fmt:skip

if info['format'] != 'annot':
convert_gifti_to_annot = pe.Node(
niu.Function(
function=gifti_to_annot,
),
name=f'convert_gifti_to_annot_{atlas}_{hemi}',
)

atlas_srcs = pe.MapNode(
BIDSURI(
numinputs=1,
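
The hunk above only declares the conversion node; its inputs and connections fall outside the lines shown. A minimal sketch of how such a niu.Function node could be wired, with input/output names matching gifti_to_annot and a hypothetical info['labels_file'] entry for the TSV (assumptions, not the committed wiring):

convert_gifti_to_annot = pe.Node(
    niu.Function(
        input_names=['gifti', 'atlas', 'hemi', 'labels_file'],
        output_names=['annot'],
        function=gifti_to_annot,
    ),
    name=f'convert_gifti_to_annot_{atlas}_{hemi}',
)
# Static inputs; the labels-file key is hypothetical.
convert_gifti_to_annot.inputs.atlas = atlas
convert_gifti_to_annot.inputs.hemi = hemi
convert_gifti_to_annot.inputs.labels_file = info['labels_file']
workflow.connect([
    (inputnode, convert_gifti_to_annot, [(('atlas_file', selecter), 'gifti')]),
])  # fmt:skip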
