Small updates #1

Open
wants to merge 13 commits into master
31 files in docs/ changed mode 100644 → 100755 (permission change only, no content changes):

docs/LICENSE
docs/Makefile
docs/conf.py
docs/contributing.rst
docs/elikopy.rst
docs/elikopy_project.rst
docs/elikopy_usage.rst
docs/examples.rst
docs/favicon.png
docs/index.rst
docs/installation.rst
docs/introduction.rst
docs/license.rst
docs/logo.png
docs/make.bat
docs/metrics.rst
docs/other_utils.rst
docs/pictures/APvsT1.jpg
docs/pictures/T1_pipeline_steps.jpg
docs/pictures/folder_structure.jpg
docs/pictures/pipeline_steps.jpg
docs/pictures/preproc_bet.jpg
docs/pictures/preproc_gibbs.jpg
docs/pictures/preproc_mppca.jpg
docs/pictures/preprocessing.PNG
docs/pictures/provisoire.png
docs/pictures/root.png
docs/pictures/root_reverse.png
docs/preprocessing_T1.rst
docs/preprocessing_dmri.rst
docs/stats.rst
123 changes: 70 additions & 53 deletions elikopy/core.py

Large diffs are not rendered by default.

310 changes: 181 additions & 129 deletions elikopy/individual_subject_processing.py

Large diffs are not rendered by default.

51 changes: 35 additions & 16 deletions elikopy/utils.py
@@ -575,6 +575,9 @@ def synb0DisCo(folder_path,topuppath,patient_path,starting_step=None,topup=True,
import torch.nn.functional as F
import torch.optim as optim

if starting_step == "None":
starting_step = None

assert starting_step in (None, "Registration", "Inference", "Apply", "topup")

synb0path = topuppath + "/synb0-DisCo"
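
The new guard converts the literal string "None" into the None object before the assert. This matters when starting_step is forwarded from a command line or a job script, where every value arrives as text. A minimal sketch of the failure mode it prevents, using argparse purely for illustration (the flag name is an assumption, not part of the PR):

```python
import argparse

parser = argparse.ArgumentParser()
# Hypothetical CLI flag: argparse hands the value back as a string, so an
# option given as "None" never compares equal to the None object itself.
parser.add_argument("--starting_step", default="None")
args = parser.parse_args(["--starting_step", "None"])

starting_step = args.starting_step
if starting_step == "None":  # normalise the sentinel string, as the PR does
    starting_step = None

# Without the normalisation this assert would fail for the string "None".
assert starting_step in (None, "Registration", "Inference", "Apply", "topup")
```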
@@ -678,7 +681,7 @@ def synb0DisCo(folder_path,topuppath,patient_path,starting_step=None,topup=True,
model_path = glob.glob(model_path)[0]
# Get model
model = UNet3D(2, 1).to(device)
model.load_state_dict(torch.load(model_path))
model.load_state_dict(torch.load(model_path,map_location=device))

# Inference
step3_log.write("[SynB0DISCO] " + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + ": Beginning of inference " + str(i) +"\n\n")
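
Adding map_location to torch.load lets weights that were serialized on a GPU be restored on a CPU-only node instead of raising a deserialization error. A self-contained sketch of the idea, using a stand-in module rather than the synb0-DisCo UNet3D:

```python
import torch
import torch.nn as nn

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# Stand-in network; the PR applies the same idea to the UNet3D weights.
model = nn.Linear(4, 2)
torch.save(model.state_dict(), "checkpoint.pth")

# Without map_location, tensors saved from "cuda:0" are restored onto that
# device and fail on a CPU-only machine; map_location remaps them to `device`.
state_dict = torch.load("checkpoint.pth", map_location=device)
model.load_state_dict(state_dict)
```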
@@ -822,7 +825,7 @@ def inference(T1_path, b0_d_path, model, device):
return img_model


def regall_FA(folder_path, grp1, grp2, starting_state=None, registration_type="-T", postreg_type="-S", prestats_treshold=0.2, core_count=1):
def regall_FA(folder_path, grp1, grp2, starting_state=None, registration_type="-T", postreg_type="-S", prestats_treshold=0.2, core_count=1,patient_list=None):
""" Register all the subjects Fractional Anisotropy into a common space, skeletonisedd and non skeletonised. This is performed based on TBSS of FSL.
It is mandatory to have performed DTI prior to regall_FA.

@@ -853,8 +856,9 @@ def regall_FA(folder_path, grp1, grp2, starting_state=None, registration_type="-

# open the subject and is_control lists
dest_success = folder_path + "/subjects/subj_list.json"
with open(dest_success, 'r') as f:
patient_list = json.load(f)
if patient_list is None:
with open(dest_success, 'r') as f:
patient_list = json.load(f)
dest_subj_type = folder_path + "/subjects/subj_type.json"
with open(dest_subj_type, 'r') as f:
subj_type = json.load(f)
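
The new optional patient_list argument lets a subset of subjects be registered without editing subj_list.json; leaving it at None keeps the previous behaviour. A hypothetical call in which the paths, group codes and subject IDs are placeholders:

```python
from elikopy.utils import regall_FA

# Placeholder call: only the listed subjects are registered. Omitting
# patient_list (or passing None) falls back to reading
# folder_path + "/subjects/subj_list.json", as before this change.
regall_FA(
    folder_path="/path/to/study",
    grp1=[1], grp2=[2],               # group codes, placeholder values
    patient_list=["sub01", "sub03"],  # restrict registration to these subjects
    core_count=4,
)
```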
@@ -977,7 +981,7 @@ def regall_FA(folder_path, grp1, grp2, starting_state=None, registration_type="-
registration_log.flush()


def regall(folder_path, grp1, grp2, core_count=1 ,metrics_dic={'_noddi_odi':'noddi','_mf_fvf_tot':'mf','_diamond_kappa':'diamond'}):
def regall(folder_path, grp1, grp2, patient_list=None, core_count=1 ,metrics_dic={'_noddi_odi':'noddi','_mf_fvf_tot':'mf','_diamond_kappa':'diamond'}):
""" Register all the subjects diffusion metrics specified in the argument metrics_dic into a common space using the transformation computed for the FA with the regall_FA function. This is performed based on TBSS of FSL.
It is mandatory to have performed regall_FA prior to regall.

@@ -1004,8 +1008,9 @@ def regall(folder_path, grp1, grp2, core_count=1 ,metrics_dic={'_noddi_odi':'nod

# open the subject and is_control lists
dest_success = folder_path + "/subjects/subj_list.json"
with open(dest_success, 'r') as f:
patient_list = json.load(f)
if patient_list in (None,'None'):
with open(dest_success, 'r') as f:
patient_list = json.load(f)
dest_subj_type = folder_path + "/subjects/subj_type.json"
with open(dest_subj_type, 'r') as f:
subj_type = json.load(f)
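
regall gains the same optional patient_list, and here the string "None" is also treated as "not set". A hypothetical call restricting the registration to a single metric (all values are placeholders):

```python
from elikopy.utils import regall

# Placeholder call: passing patient_list=None (or the string "None") keeps the
# previous behaviour of reading folder_path + "/subjects/subj_list.json".
regall(
    folder_path="/path/to/study",
    grp1=[1], grp2=[2],
    patient_list=["sub01", "sub03"],
    metrics_dic={"_noddi_odi": "noddi"},  # register only the NODDI ODI maps
)
```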
@@ -1169,25 +1174,39 @@ def randomise_all(folder_path,randomise_numberofpermutation=5000,skeletonised=Tr
xmlName = [atlas_path + "/MNI.xml", atlas_path + "/HarvardOxford-Cortical.xml", atlas_path + "/HarvardOxford-Subcortical.xml", atlas_path + "/JHU-tracts.xml"]
atlases = [atlas_path + "/MNI/MNI-prob-1mm.nii.gz", atlas_path + "/HarvardOxford/HarvardOxford-cort-prob-1mm.nii.gz", atlas_path + "/HarvardOxford/HarvardOxford-sub-prob-1mm.nii.gz", atlas_path + "/JHU/JHU-ICBM-tracts-prob-1mm.nii.gz"]
name = ["MNI", "HarvardCortical", "HarvardSubcortical", "JHUWhiteMatterTractography"]
csv_or_xml = ["xml","xml","xml","xml"]
if additional_atlases:
xmlName = xmlName + list(map(list, zip(*list(additional_atlases.values()))))[0]
atlases = atlases + list(map(list, zip(*list(additional_atlases.values()))))[1]
csv_or_xml = csv_or_xml + list(map(list, zip(*list(additional_atlases.values()))))[2]
name = name + list(additional_atlases.keys())
# open the data
data, data_affine = load_nifti(outputdir + '/stats/all_' + key + '.nii.gz')

for iteration in range(len(atlases)):

# read the labels in xml file
x = etree.parse(xmlName[iteration])
labels = []
for elem in x.iter():
if elem.tag == 'label':
labels.append([elem.attrib['index'], elem.text])
labels = np.array(labels)[:, 1]


csv = csv_or_xml[iteration]=='csv'
if csv:
# read the labels in csv file
labels = pd.read_csv(xmlName[iteration],header=None,names=['index','Area'])
labels = labels['Area'].values
else:
# read the labels in xml file
x = etree.parse(xmlName[iteration])
labels = []
for elem in x.iter():
if elem.tag == 'label':
labels.append([elem.attrib['index'], elem.text])
labels = np.array(labels)[:, 1]

# open the atlas
atlas, atlas_affine = load_nifti(atlases[iteration])
if csv:
atlas_stack = np.zeros((np.shape(atlas)[0],np.shape(atlas)[1],np.shape(atlas)[2],int(len(np.unique(atlas.flatten())[1:]))))
for ivalue, value in enumerate(np.unique(atlas.flatten())[1:]):
atlas_stack[..., ivalue] = atlas == value
# define a 3D image for each label and stack them together
atlas = atlas_stack

matrix = np.zeros((np.shape(data)[-1], np.shape(atlas)[-1]))
for i in range(np.shape(atlas)[-1]):
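
From the way additional_atlases is unpacked above, each entry appears to be name → (labels file, atlas image, "csv" or "xml"): "xml" entries are parsed like the bundled FSL atlases, while "csv" entries are read with pandas and their 3D label image is expanded into one binary volume per label before the region statistics are computed. A hypothetical dictionary and call, with made-up paths:

```python
from elikopy.utils import randomise_all

# Hypothetical additional_atlases value; every path below is a placeholder.
# Each entry is [labels file, atlas image, label-file type]. The CSV is read
# with pd.read_csv(path, header=None, names=['index', 'Area']), i.e. two
# comma-separated columns and no header row, for example:
#   1,Left-Region
#   2,Right-Region
additional_atlases = {
    "MyLabelAtlas": ["/atlases/my_labels.csv",
                     "/atlases/my_labels.nii.gz",
                     "csv"],
    "MyProbAtlas": ["/atlases/my_prob_atlas.xml",
                    "/atlases/my_prob_atlas-prob-1mm.nii.gz",
                    "xml"],
}

# Assuming additional_atlases is accepted as a keyword argument of randomise_all:
randomise_all("/path/to/study", additional_atlases=additional_atlases)
```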