# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def save():
    """Save the streamlines of all currently visible bundles to 'tmp.trk'.

    Walks every bundle in ``self.cla``; for each bundle whose actor is
    visible, collects the streamlines of its cluster from
    ``self.tractogram_clusters`` and writes them all to a single TRK file.

    NOTE(review): ``self`` is not a parameter here — this function is
    presumably a nested closure defined inside a method, capturing ``self``
    from the enclosing scope; confirm against the original file. The
    original fragment had its indentation flattened; the structure below is
    the straightforward reconstruction.
    """
    saving_streamlines = Streamlines()
    for bundle in self.cla.keys():
        if bundle.GetVisibility():
            t = self.cla[bundle]['tractogram']
            c = self.cla[bundle]['cluster']
            indices = self.tractogram_clusters[t][c]
            saving_streamlines.extend(Streamlines(indices))
    print('Saving result in tmp.trk')

    # Using the header of the first of the tractograms
    sft_new = StatefulTractogram(saving_streamlines,
                                 self.tractograms[0],
                                 Space.RASMM)
    # bbox_valid_check=False: do not reject streamlines outside the
    # bounding box at save time.
    save_tractogram(sft_new, 'tmp.trk', bbox_valid_check=False)
    print('Saved!')
# --- Fragment (interior of an unseen function; original indentation lost) ---
# Flush clusters skipped by an early GUI exit into the rejected set, then
# save the accepted and rejected tractograms.
rejected_streamlines.extend(range(len_accepted - missing,
len_accepted))
if missing > 0:
# NOTE(review): implicit string concatenation below produces
# "...rejectedfrom early exit" — a space is missing between the two
# string literals (a runtime string; cannot be fixed in a doc-only edit).
logging.info('{} clusters automatically rejected'
'from early exit'.format(missing))
# Save accepted clusters (by GUI)
accepted_streamlines = save_clusters(sft_accepted_on_size,
accepted_streamlines,
args.out_accepted_dir,
filename_accepted_on_size)
accepted_sft = StatefulTractogram(accepted_streamlines,
sft_accepted_on_size[0],
Space.RASMM)
save_tractogram(accepted_sft, args.out_accepted, bbox_valid_check=False)
# Save rejected clusters (by GUI)
# Rejected-by-GUI clusters come from the size-accepted set, hence the
# accepted-on-size file names are reused here.
rejected_streamlines = save_clusters(sft_accepted_on_size,
rejected_streamlines,
args.out_rejected_dir,
filename_accepted_on_size)
# Save rejected clusters (by size)
rejected_streamlines.extend(save_clusters(sft_rejected_on_size,
range(len(sft_rejected_on_size)),
args.out_rejected_dir,
filename_rejected_on_size))
rejected_sft = StatefulTractogram(rejected_streamlines,
sft_accepted_on_size[0],
Space.RASMM)
# NOTE(review): the parser.error() call below aborts with a usage error and
# appears to belong to a different (input-validation) fragment than the
# cluster-saving code above — confirm against the original file.
parser.error(
'Zero or one streamline in {}'.format(args.in_tractogram) +
'. The file must have more than one streamline.')
# --- Fragment (interior of an unseen function; original indentation lost) ---
# Write the cleaned streamlines (ids_c) and, optionally, the looping
# streamlines (ids_l) to their respective output tractograms.
if len(ids_c) > 0:
sft_c = filter_tractogram_data(tractogram, ids_c)
save_tractogram(sft_c, args.out_tractogram)
else:
# No clean streamlines: warn instead of writing an output file.
logging.warning(
'No clean streamlines in {}'.format(args.in_tractogram))
if len(ids_l) == 0:
logging.warning('No loops in {}'.format(args.in_tractogram))
elif args.looping_tractogram:
# Loops are only saved when the user asked for a loops output file.
sft_l = filter_tractogram_data(tractogram, ids_l)
save_tractogram(sft_l, args.looping_tractogram)
the ``save_tractogram`` function will save a valid file, and the
``load_tractogram`` function will then load it back in a valid state.
"""
# --- Fragment (script tail; original indentation lost) ---
# Wrap each bundle's voxel-space streamlines in a StatefulTractogram so the
# spatial metadata from `reference_anatomy` travels with them on disk.
cc_sft = StatefulTractogram(cc_streamlines_vox, reference_anatomy, Space.VOX)
laf_sft = StatefulTractogram(laf_streamlines_vox, reference_anatomy, Space.VOX)
raf_sft = StatefulTractogram(raf_streamlines_vox, reference_anatomy, Space.VOX)
lpt_sft = StatefulTractogram(lpt_streamlines_vox, reference_anatomy, Space.VOX)
rpt_sft = StatefulTractogram(rpt_streamlines_vox, reference_anatomy, Space.VOX)
# Streamline count per bundle, printed as a quick sanity check.
print(len(cc_sft), len(laf_sft), len(raf_sft), len(lpt_sft), len(rpt_sft))
# One .trk file per bundle.
save_tractogram(cc_sft, 'cc_1000.trk')
save_tractogram(laf_sft, 'laf_1000.trk')
save_tractogram(raf_sft, 'raf_1000.trk')
save_tractogram(lpt_sft, 'lpt_1000.trk')
save_tractogram(rpt_sft, 'rpt_1000.trk')
# Matching per-bundle density maps saved as NIfTI volumes, all sharing the
# same affine and header.
nib.save(nib.Nifti1Image(cc_density, affine, nifti_header),
'cc_density.nii.gz')
nib.save(nib.Nifti1Image(laf_density, affine, nifti_header),
'laf_density.nii.gz')
nib.save(nib.Nifti1Image(raf_density, affine, nifti_header),
'raf_density.nii.gz')
nib.save(nib.Nifti1Image(lpt_density, affine, nifti_header),
'lpt_density.nii.gz')
nib.save(nib.Nifti1Image(rpt_density, affine, nifti_header),
'rpt_density.nii.gz')
# NOTE(review): the line below is the truncated tail of a longer expression
# whose beginning is not visible in this fragment.
np.linalg.inv(adjusted_affine)), np.linalg.inv(warped_fa_img.affine)), np.eye(4), brain_mask, include=True))
# Remove streamlines with negative voxel indices
lin_T, offset = _mapping_to_voxel(np.eye(4))
streams_final_filt_final = []
for sl in streams_final_filt:
# Map each point to voxel coordinates; keep the streamline only if every
# (rounded) index is non-negative.
inds = np.dot(sl, lin_T)
inds += offset
if not inds.min().round(decimals=6) < 0:
streams_final_filt_final.append(sl)
# Save streamlines
# NOTE(review): `shifted_origin` is deprecated in recent DIPY releases
# (replaced by `origin=Origin.TRACKVIS`) — confirm the pinned DIPY version.
stf = StatefulTractogram(streams_final_filt_final, reference=warped_fa_img, space=Space.RASMM, shifted_origin=True)
# Drop streamlines outside the bounding box before saving with
# bbox_valid_check=True.
stf.remove_invalid_streamlines()
streams_final_filt_final = stf.streamlines
save_tractogram(stf, streams_mni, bbox_valid_check=True)
warped_fa_img.uncache()
# DSN QC plotting
# plot_gen.show_template_bundles(streams_final_filt_final, template_path, streams_warp_png)
# Create and save MNI density map
nib.save(nib.Nifti1Image(utils.density_map(streams_final_filt_final, affine=np.eye(4),
vol_dims=warped_fa_shape), warped_fa_affine), density_mni)
# Map parcellation from native space back to MNI-space and create an 'uncertainty-union' parcellation
# with original mni-space uatlas
uatlas_mni_img = nib.load(uatlas)
warped_uatlas = affine_map.transform_inverse(mapping.transform(np.asarray(atlas_img.dataobj).astype('int'),
interpolation='nearestneighbour'), interp='nearest')
atlas_img.uncache()
# --- Fragment (script body; original indentation lost) ---
# Smooth every streamline (Gaussian or spline), then optionally compress.
sft = load_tractogram_with_reference(parser, args, args.in_tractogram)
smoothed_streamlines = []
for streamline in sft.streamlines:
if args.gaussian:
tmp_streamlines = smooth_line_gaussian(streamline, args.gaussian)
else:
tmp_streamlines = smooth_line_spline(streamline, args.spline[0],
args.spline[1])
# NOTE(review): when args.error_rate is falsy the smoothed streamline is
# never appended, so the output would be empty — the upstream script has
# an else-branch appending tmp_streamlines; confirm this fragment is not
# missing it.
if args.error_rate:
smoothed_streamlines.append(compress_streamlines(tmp_streamlines,
args.error_rate))
# Per-point data is not carried over (only data_per_streamline is passed):
# smoothing changes the number of points per streamline.
smoothed_sft = StatefulTractogram.from_sft(smoothed_streamlines, sft,
data_per_streamline=sft.data_per_streamline)
save_tractogram(smoothed_sft, args.out_tractogram)
# --- Fragment (script main; original indentation lost) ---
# Validate CLI arguments, compute the bundle centroid, and save it.
args = parser.parse_args()
assert_inputs_exist(parser, args.in_bundle)
assert_outputs_exist(parser, args, args.out_centroid)
# A centroid streamline needs at least two points.
if args.nb_points < 2:
parser.error('--nb_points {} should be >= 2'.format(args.nb_points))
sft = load_tractogram_with_reference(parser, args, args.in_bundle)
centroid_streamlines = get_streamlines_centroid(sft.streamlines,
args.nb_points)
# Reuse the input tractogram's spatial attributes for the centroid output.
sft = StatefulTractogram.from_sft(centroid_streamlines, sft)
save_tractogram(sft, args.out_centroid)
# --- Fragment (interior of an unseen function; original indentation lost) ---
# Report before/after streamline counts as JSON, then save the result.
sc_af = len(new_sft.streamlines)
print(json.dumps({'streamline_count_before_filtering': int(sc_bf),
'streamline_count_after_filtering': int(sc_af)},
indent=args.indent))
if len(new_sft.streamlines) == 0:
# --no_empty: skip writing an empty tractogram entirely.
if args.no_empty:
logging.debug("The file {} won't be written "
"(0 streamline).".format(args.out_tractogram))
return
# Without --no_empty, an empty tractogram is still written below.
logging.debug('The file {} contains 0 streamline'.format(
args.out_tractogram))
save_tractogram(new_sft, args.out_tractogram)
# --- Fragment (interior of an unseen function; original indentation lost) ---
# Run RecoBundles recognition against the whole-brain tractogram, then save
# the recognized streamlines with their per-streamline/per-point metadata.
if args.out_pickle:
# Persist the cluster map for later inspection/reuse.
with open(args.out_pickle, 'wb') as outfile:
pickle.dump(reco_obj.cluster_map, outfile)
_, indices = reco_obj.recognize(ArraySequence(model_streamlines),
args.model_clustering_thr,
pruning_thr=args.pruning_thr,
slr_num_threads=args.slr_threads)
# Slice streamlines and both metadata containers with the same indices so
# they stay aligned.
new_streamlines = wb_streamlines[indices]
new_data_per_streamlines = wb_file.data_per_streamline[indices]
new_data_per_points = wb_file.data_per_point[indices]
# Write the output unless it is empty and --no_empty was requested.
if not args.no_empty or new_streamlines:
sft = StatefulTractogram(new_streamlines, wb_file, Space.RASMM,
data_per_streamline=new_data_per_streamlines,
data_per_point=new_data_per_points)
save_tractogram(sft, args.out_tractogram)
# --- Fragment (interior of an unseen loop; original indentation lost; the
# dangling `else:` below pairs with an `if` outside this view) ---
# Convergence control for an iterative streamline-comparison pass: once
# converged (or the set is small enough), drop to a single process.
args.processes = 1
last_iteration = True
else:
logging.debug('Threshold of convergence was not achieved.'
' Need another run...\n')
args.min_cluster_size = 1
# Once the streamlines reached a low enough amount, switch to
# single thread for full comparison
if len(streamlines) < 10000:
args.processes = 1
# Shuffle so the next pass compares streamlines in a different order.
random.shuffle(streamlines)
# After convergence, we can simply save the output
new_sft = StatefulTractogram.from_sft(streamlines, sft)
save_tractogram(new_sft, args.out_bundle)