Skip to content

Commit

Permalink
Keep working.
Browse files Browse the repository at this point in the history
  • Loading branch information
tsalo committed Dec 3, 2024
1 parent 2da1a28 commit 496f0cd
Show file tree
Hide file tree
Showing 3 changed files with 201 additions and 58 deletions.
99 changes: 99 additions & 0 deletions src/smripost_linc/interfaces/misc.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,13 @@
"""Miscellaneous interfaces for fmriprep-aroma."""

from nipype.interfaces.base import (
CommandLineInputSpec,
File,
TraitedSpec,
isdefined,
traits,
)
from nipype.interfaces.workbench.base import WBCommand as WBCommandBase
from nipype.utils.filemanip import fname_presuffix
from niworkflows.interfaces.fixes import (
FixHeaderApplyTransforms,
Expand Down Expand Up @@ -52,3 +56,98 @@ def _run_interface(self, runtime):

runtime = super()._run_interface(runtime)
return runtime


class _WBCommandInputSpec(CommandLineInputSpec):
    """Shared input specification for wb_command interfaces.

    Extends the plain command-line spec with a thread-count control that
    subclasses inherit.
    """

    # nohash: thread count must not affect the workflow hash / caching.
    num_threads = traits.Int(
        1,
        usedefault=True,
        nohash=True,
        desc='set number of threads',
    )


class WBCommand(WBCommandBase):
    """A base interface for wb_command.

    This inherits from Nipype's WBCommand interface, but adds a num_threads input.
    """

    def __init__(self, **inputs):
        super().__init__(**inputs)

        # Keep the subprocess environment in sync whenever num_threads changes.
        if hasattr(self.inputs, 'num_threads'):
            self.inputs.on_trait_change(self._nthreads_update, 'num_threads')

    def _nthreads_update(self):
        """Update environment with new number of threads."""
        self.inputs.environ['OMP_NUM_THREADS'] = str(self.inputs.num_threads)

    @property
    def num_threads(self):
        """Get number of threads."""
        return self.inputs.num_threads

    @num_threads.setter
    def num_threads(self, value):
        self.inputs.num_threads = value


class _CiftiSeparateMetricInputSpec(_WBCommandInputSpec):
    """Input specification for the CiftiSeparateMetric command."""

    in_file = File(
        exists=True,
        mandatory=True,
        argstr='%s ',
        position=0,
        desc='The input dense series',
    )
    direction = traits.Enum(
        'ROW',
        'COLUMN',
        mandatory=True,
        argstr='%s ',
        position=1,
        desc='which dimension to smooth along, ROW or COLUMN',
    )
    metric = traits.Str(
        mandatory=True,
        argstr=' -metric %s ',
        position=2,
        # Fix: the implicitly concatenated desc previously rendered as
        # "CORTEX_RIGHTcheck" — a separator space was missing between the parts.
        desc='which of the structure eg CORTEX_LEFT CORTEX_RIGHT '
        'check https://www.humanconnectome.org/software/workbench-command/-cifti-separate ',
    )
    out_file = File(
        name_source=['in_file'],
        # NOTE(review): template name says "correlation_matrix" but the output is a
        # separated hemisphere metric — presumably inherited naming; confirm upstream.
        name_template='correlation_matrix_%s.func.gii',
        keep_extension=True,
        argstr=' %s',
        position=3,
        desc='The gifti output, either left and right',
    )


class _CiftiSeparateMetricOutputSpec(TraitedSpec):
    """Outputs of the ``wb_command -cifti-separate`` interface."""

    out_file = File(exists=True, desc='output CIFTI file')


class CiftiSeparateMetric(WBCommand):
    """Extract left or right hemisphere surfaces from CIFTI file (.dtseries).

    Other structures can also be extracted.

    The input cifti file must have a brain models mapping on the chosen
    dimension, columns for .dtseries.

    Examples
    --------
    >>> ciftiseparate = CiftiSeparateMetric()
    >>> ciftiseparate.inputs.in_file = 'sub-01XX_task-rest.dtseries.nii'
    >>> ciftiseparate.inputs.metric = "CORTEX_LEFT" # extract left hemisphere
    >>> ciftiseparate.inputs.out_file = 'sub_01XX_task-rest_hemi-L.func.gii'
    >>> ciftiseparate.inputs.direction = 'COLUMN'
    >>> ciftiseparate.cmdline
    wb_command -cifti-separate 'sub-01XX_task-rest.dtseries.nii' COLUMN \
    -metric CORTEX_LEFT 'sub_01XX_task-rest_hemi-L.func.gii'
    """

    # Trait specs wire the inputs/outputs above into Nipype's CommandLine machinery;
    # _cmd is the fixed executable + subcommand prefix for every invocation.
    input_spec = _CiftiSeparateMetricInputSpec
    output_spec = _CiftiSeparateMetricOutputSpec
    _cmd = 'wb_command -cifti-separate'
2 changes: 1 addition & 1 deletion src/smripost_linc/utils/parcellation.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
"""Utility functions for parcellation."""


def gifti_to_annot(gifti, atlas, hemi, labels_file):
def convert_gifti_to_annot(gifti, atlas, hemi, labels_file):
"""Create .annot files from a nifti file and a json file."""
import os

Expand Down
158 changes: 101 additions & 57 deletions src/smripost_linc/workflows/parcellation.py
Original file line number Diff line number Diff line change
Expand Up @@ -148,10 +148,13 @@ def init_load_atlases_wf(name='load_atlases_wf'):
atlas_files
atlas_labels_files
"""
from neuromaps import transforms

from smripost_linc.interfaces.bids import DerivativesDataSink
from smripost_linc.interfaces.misc import CiftiSeparateMetric
from smripost_linc.utils.bids import collect_atlases
from smripost_linc.utils.boilerplate import describe_atlases
from smripost_linc.utils.parcellation import gifti_to_annot
from smripost_linc.utils.parcellation import convert_gifti_to_annot

workflow = Workflow(name=name)
output_dir = config.execution.output_dir
Expand Down Expand Up @@ -205,7 +208,8 @@ def init_load_atlases_wf(name='load_atlases_wf'):
niu.IdentityInterface(
fields=[
'atlas_names',
'atlas_files',
'lh_atlas_annots',
'rh_atlas_annots',
'atlas_labels_files',
],
),
Expand All @@ -230,40 +234,73 @@ def init_load_atlases_wf(name='load_atlases_wf'):
if info['format'] == 'gifti':
gifti_buffer.inputs.lh_gifti = info['image'][0]
gifti_buffer.inputs.rh_gifti = info['image'][1]

elif info['format'] == 'cifti':
# Split CIFTI into GIFTIs
...
else:
lh_cifti_to_gifti = pe.Node(
CiftiSeparateMetric(
in_file=info['image'],
metric='CORTEX_LEFT',
direction='COLUMN',
num_threads=config.nipype.omp_nthreads,
),
name=f'lh_cifti_to_gifti_{atlas}',
n_procs=config.nipype.omp_nthreads,
)
rh_cifti_to_gifti = pe.Node(
CiftiSeparateMetric(
in_file=info['image'],
metric='CORTEX_RIGHT',
direction='COLUMN',
num_threads=config.nipype.omp_nthreads,
),
name=f'rh_cifti_to_gifti_{atlas}',
n_procs=config.nipype.omp_nthreads,
)
workflow.connect([
(lh_cifti_to_gifti, gifti_buffer, [('out_file', 'lh_gifti')]),
(rh_cifti_to_gifti, gifti_buffer, [('out_file', 'rh_gifti')]),
]) # fmt:skip

elif info['format'] == 'nifti' and info['space'] == 'MNI152NLin6Asym':
# Convert NIfTI to GIFTI
...
nifti_to_gifti = pe.Node(
niu.Function(
function=transforms.mni152_to_fsaverage,
output_names=['lh_gifti', 'rh_gifti'],
),
name=f'nifti_to_gifti_{atlas}',
)
nifti_to_gifti.inputs.img = info['image']
nifti_to_gifti.inputs.fsavg_density = '164k'
nifti_to_gifti.inputs.method = 'nearest'

workflow.connect([
(nifti_to_gifti, gifti_buffer, [
('lh_gifti', 'lh_gifti'),
('rh_gifti', 'rh_gifti'),
]),
]) # fmt:skip

# The space is now fsaverage
info['space'] = 'fsaverage'

elif info['format'] == 'nifti':
raise NotImplementedError(
f'Unsupported format ({info["format"]}) and space ({info["space"]}) combination.'
)

for hemi in ['L', 'R']:
annot_node = lh_annots if hemi == 'L' else rh_annots
selecter = select_first if hemi == 'L' else select_second
# Identify space and file-type of the atlas
if info['space'] == 'MNI152NLin6Asym' and info['format'] == 'nifti':
# Convert MNI152NLin6Asym to annot
create_annot = pe.Node(
niu.Function(
function=create_annots,
),
name=f'create_annot_{atlas}',
)
create_annot.inputs.atlas = atlas

# Warp fsaverage-annot to fsnative-annot
...

elif info['format'] == 'nifti':
raise NotImplementedError('Only MNI152NLin6Asym NIfTI atlases are supported.')

elif info['space'] == 'fsLR':
# Identify space and file-type of the atlas
if info['space'] == 'fsLR':
# Warp atlas from fsLR to fsaverage
warp_fslr_to_fsaverage = pe.Node(
niu.Function(
function=fslr_to_fsaverage,
),
name=f'warp_fslr_to_fsaverage_{atlas}',
name=f'warp_fslr_to_fsaverage_{atlas}_{hemi}',
)
warp_fslr_to_fsaverage.inputs.target_density = '164k'
warp_fslr_to_fsaverage.inputs.hemi = hemi
Expand All @@ -273,42 +310,32 @@ def init_load_atlases_wf(name='load_atlases_wf'):
]) # fmt:skip

# Convert fsaverage to annot
...

# Warp fsaverage-annot to fsnative-annot
...

elif info['space'] == 'fsaverage':
# Convert fsaverage to annot
create_annot = pe.Node(
gifti_to_annot = pe.Node(
niu.Function(
function=create_annots,
function=convert_gifti_to_annot,
),
name=f'create_annot_{atlas}',
name=f'gifti_to_annot_{atlas}_{hemi}',
)
create_annot.inputs.atlas = atlas

# Warp fsaverage-annot to fsnative-annot
...

# Write out fsnative-annot files
workflow.connect([
(create_annot, annot_node, [(('annot', selecter), f'in{i_atlas + 1}')]),
]) # fmt:skip

elif info['space'] == 'fsnative' and info['format'] == 'annot':
# Write out fsnative-annot files
workflow.connect([
(inputnode, annot_node, [(('atlas_file', selecter), f'in{i_atlas + 1}')]),
(warp_fslr_to_fsaverage, gifti_to_annot, [('out', 'in_file')]),
(gifti_to_annot, annot_node, [('out', f'in{i_atlas + 1}')]),
]) # fmt:skip

if info['format'] != 'annot':
convert_gifti_to_annot = pe.Node(
elif info['format'] == 'gifti' and info['space'] == 'fsaverage':
# Convert fsaverage to annot
gifti_to_annot = pe.Node(
niu.Function(
function=gifti_to_annot,
function=convert_gifti_to_annot,
),
name=f'convert_gifti_to_annot_{atlas}_{hemi}',
name=f'gifti_to_annot_{atlas}_{hemi}',
)
workflow.connect([
(gifti_buffer, gifti_to_annot, [(f'{hemi.lower()}_gifti', 'in_file')]),
(gifti_to_annot, annot_node, [('out', f'in{i_atlas + 1}')]),
]) # fmt:skip

elif info['space'] == 'fsnative' and info['format'] == 'annot':
raise Exception()

atlas_srcs = pe.MapNode(
BIDSURI(
Expand All @@ -322,21 +349,38 @@ def init_load_atlases_wf(name='load_atlases_wf'):
)
workflow.connect([(inputnode, atlas_srcs, [('atlas_files', 'in1')])])

copy_atlas = pe.MapNode(
ds_atlas_lh = pe.MapNode(
DerivativesDataSink(),
name='ds_atlas_lh',
iterfield=['in_file', 'atlas', 'meta_dict', 'Sources'],
run_without_submitting=True,
)
workflow.connect([
(inputnode, ds_atlas_lh, [
('name_source', 'name_source'),
('atlas_names', 'atlas'),
('atlas_metadata', 'meta_dict'),
]),
(lh_annots, ds_atlas_lh, [('out', 'in_file')]),
(atlas_srcs, ds_atlas_lh, [('out', 'Sources')]),
(ds_atlas_lh, outputnode, [('out_file', 'lh_atlas_annots')]),
]) # fmt:skip

ds_atlas_rh = pe.MapNode(
DerivativesDataSink(),
name='copy_atlas',
name='ds_atlas_rh',
iterfield=['in_file', 'atlas', 'meta_dict', 'Sources'],
run_without_submitting=True,
)
workflow.connect([
(inputnode, copy_atlas, [
(inputnode, ds_atlas_rh, [
('name_source', 'name_source'),
('atlas_names', 'atlas'),
('atlas_files', 'in_file'),
('atlas_metadata', 'meta_dict'),
]),
(atlas_srcs, copy_atlas, [('out', 'Sources')]),
(copy_atlas, outputnode, [('out_file', 'atlas_files')]),
(lh_annots, ds_atlas_rh, [('out', 'in_file')]),
(atlas_srcs, ds_atlas_rh, [('out', 'Sources')]),
(ds_atlas_rh, outputnode, [('out_file', 'rh_atlas_annots')]),
]) # fmt:skip

copy_atlas_labels_file = pe.MapNode(
Expand Down

0 comments on commit 496f0cd

Please sign in to comment.