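# Imports shared by the snippets below; the module paths are assumed from the
# MIScnn package layout and may differ between library versions.
import os
import tempfile
import numpy as np
from miscnn.data_loading.data_io import Data_IO
from miscnn.data_loading.interfaces.dictionary_io import Dictionary_interface
from miscnn.processing.data_augmentation import Data_Augmentation
from miscnn.processing.preprocessor import Preprocessor
from miscnn.processing.subfunctions.clipping import Clipping
from miscnn.processing.subfunctions.normalization import Normalization
from miscnn.processing.subfunctions.resize import Resize
from miscnn.neural_network.model import Neural_Network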
def test_IOI_DICTIONARY_loading(self):
    my_dict = dict()
    my_dict["dict_sample"] = (self.img, self.seg)
    interface = Dictionary_interface(my_dict)
    sample_list = interface.initialize("")
    img = interface.load_image(sample_list[0])
    seg = interface.load_segmentation(sample_list[0])
    self.assertTrue(np.array_equal(img, self.img))
    self.assertTrue(np.array_equal(seg, self.seg))
# NIFTI_interface - Loading and Storage of Predictions
@classmethod
def setUpClass(self):
    np.random.seed(1234)
    # Create 2D imaging and segmentation data set
    self.dataset2D = dict()
    for i in range(0, 6):
        img = np.random.rand(16, 16) * 255
        self.img = img.astype(int)
        seg = np.random.rand(16, 16) * 3
        self.seg = seg.astype(int)
        self.dataset2D["TEST.sample_" + str(i)] = (self.img, self.seg)
    # Initialize Dictionary IO Interface
    io_interface2D = Dictionary_interface(self.dataset2D, classes=3,
                                          three_dim=False)
    # Initialize temporary directory
    self.tmp_dir2D = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
    tmp_batches = os.path.join(self.tmp_dir2D.name, "batches")
    # Initialize Data IO
    self.data_io2D = Data_IO(io_interface2D,
                             input_path=os.path.join(self.tmp_dir2D.name),
                             output_path=os.path.join(self.tmp_dir2D.name),
                             batch_path=tmp_batches, delete_batchDir=False)
    # Initialize Preprocessor
    self.pp2D = Preprocessor(self.data_io2D, batch_size=2,
                             data_aug=None, analysis="fullimage")
    # Get sample list
    self.sample_list2D = self.data_io2D.get_indiceslist()
    # Create 3D imaging and segmentation data set
    self.dataset3D = dict()
    for i in range(0, 10):
        img = np.random.rand(16, 16, 16) * 255
        self.img = img.astype(int)
        seg = np.random.rand(16, 16, 16) * 3
        self.seg = seg.astype(int)
        if i in range(8, 10): sample = (self.img, None)
        else: sample = (self.img, self.seg)
        self.dataset3D["TEST.sample_" + str(i)] = sample
    # Initialize Dictionary IO Interface
    io_interface3D = Dictionary_interface(self.dataset3D, classes=3,
                                          three_dim=True)
    # Initialize temporary directory
    self.tmp_dir3D = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
    tmp_batches = os.path.join(self.tmp_dir3D.name, "batches")
    # Initialize Data IO
    self.data_io3D = Data_IO(io_interface3D, input_path="", output_path="",
                             batch_path=tmp_batches, delete_batchDir=False)
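# A minimal sketch (not part of the original test file) of how the fixtures
# built above are typically consumed: load one 3D sample through the
# Dictionary-based Data IO and check that image data is present.
def test_sample_loading_sketch(self):
    sample_list = self.data_io3D.get_indiceslist()
    sample = self.data_io3D.sample_loader(sample_list[0])
    self.assertIsNotNone(sample.img_data)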
def test_SUBFUNCTIONS_postprocessing(self):
    ds = dict()
    for i in range(0, 10):
        img = np.random.rand(16, 16, 16) * 255
        img = img.astype(int)
        seg = np.random.rand(16, 16, 16) * 3
        seg = seg.astype(int)
        sample = (img, seg)
        ds["TEST.sample_" + str(i)] = sample
    io_interface = Dictionary_interface(ds, classes=3, three_dim=True)
    self.tmp_dir = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
    tmp_batches = os.path.join(self.tmp_dir.name, "batches")
    dataio = Data_IO(io_interface, input_path="", output_path="",
                     batch_path=tmp_batches, delete_batchDir=False)
    sf = [Resize((9, 9, 9)), Normalization(), Clipping(min=-1.0, max=0.0)]
    pp = Preprocessor(dataio, batch_size=1, prepare_subfunctions=False,
                      analysis="patchwise-grid", subfunctions=sf,
                      patch_shape=(4, 4, 4))
    sample_list = dataio.get_indiceslist()
    for index in sample_list:
        sample = dataio.sample_loader(index)
        for sf in pp.subfunctions:
            sf.preprocessing(sample, training=False)
        pp.cache["shape_" + str(index)] = sample.img_data.shape
        sample.seg_data = np.random.rand(9, 9, 9) * 3
        sample.seg_data = sample.seg_data.astype(int)
def test_IOI_DICTIONARY_predictionhandling(self):
    my_dict = dict()
    my_dict["dict_sample"] = (self.img, self.seg)
    interface = Dictionary_interface(my_dict)
    sample_list = interface.initialize("")
    interface.save_prediction(self.seg, "dict_sample", "")
    pred = interface.load_prediction("dict_sample", "")
    self.assertTrue(np.array_equal(pred, self.seg))
@classmethod
def setUpClass(self):
    # Create imaging and segmentation data set
    np.random.seed(1234)
    self.dataset = dict()
    for i in range(0, 10):
        img = np.random.rand(16, 16, 16) * 255
        self.img = img.astype(int)
        seg = np.random.rand(16, 16, 16) * 3
        self.seg = seg.astype(int)
        if i == 3: sample = (self.img, self.seg, self.seg)
        elif i == 5: sample = (self.img, None, self.seg)
        else: sample = (self.img, self.seg)
        self.dataset["TEST.sample_" + str(i)] = sample
    # Initialize Dictionary IO Interface
    self.io_interface = Dictionary_interface(self.dataset)
    # Initialize temporary directory
    self.tmp_dir = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
    self.tmp_batches = os.path.join(self.tmp_dir.name, "batches")
@classmethod
def setUpClass(self):
    np.random.seed(1234)
    # Create 2D imaging and segmentation data set
    self.dataset = dict()
    for i in range(0, 6):
        img = np.random.rand(16, 16) * 255
        self.img = img.astype(int)
        seg = np.random.rand(16, 16) * 2
        self.seg = seg.astype(int)
        self.dataset["TEST.sample_" + str(i)] = (self.img, self.seg)
    # Initialize Dictionary IO Interface
    io_interface = Dictionary_interface(self.dataset, classes=3,
                                        three_dim=False)
    # Initialize temporary directory
    self.tmp_dir = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
    tmp_batches = os.path.join(self.tmp_dir.name, "batches")
    # Initialize Data IO
    self.data_io = Data_IO(io_interface,
                           input_path=os.path.join(self.tmp_dir.name),
                           output_path=os.path.join(self.tmp_dir.name),
                           batch_path=tmp_batches, delete_batchDir=False)
    # Initialize Preprocessor
    self.pp = Preprocessor(self.data_io, batch_size=2,
                           data_aug=None, analysis="fullimage")
    # Initialize Neural Network
    self.model = Neural_Network(self.pp)
    # Get sample list
    self.sample_list = self.data_io.get_indiceslist()
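# A minimal sketch (not part of the original snippet) of how the fixture above
# is usually exercised; train() and predict() follow the public MIScnn
# Neural_Network API, and the split and epoch count are illustrative only.
def test_model_training_sketch(self):
    self.model.train(self.sample_list[0:4], epochs=1)
    self.model.predict(self.sample_list[4:6])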
@classmethod
def setUpClass(self):
    np.random.seed(1234)
    # Create 2D imaging and segmentation data set
    self.dataset2D = dict()
    for i in range(0, 10):
        img = np.random.rand(16, 16) * 255
        self.img = img.astype(int)
        seg = np.random.rand(16, 16) * 2
        self.seg = seg.astype(int)
        self.dataset2D["TEST.sample_" + str(i)] = (self.img, self.seg)
    # Initialize Dictionary IO Interface
    io_interface2D = Dictionary_interface(self.dataset2D, classes=3,
                                          three_dim=False)
    # Initialize temporary directory
    self.tmp_dir2D = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
    tmp_batches = os.path.join(self.tmp_dir2D.name, "batches")
    # Initialize Data IO
    self.data_io2D = Data_IO(io_interface2D, input_path="", output_path="",
                             batch_path=tmp_batches, delete_batchDir=False)
    # Initialize Preprocessor
    self.pp2D = Preprocessor(self.data_io2D, batch_size=2,
                             data_aug=None, analysis="fullimage")
    # Get sample list
    self.sample_list2D = self.data_io2D.get_indiceslist()
    # Create 3D imaging and segmentation data set
    self.dataset3D = dict()
    for i in range(0, 6):
        img = np.random.rand(16, 16, 16) * 255
        self.img = img.astype(int)
        seg = np.random.rand(16, 16, 16) * 3
        self.seg = seg.astype(int)
        self.dataset3D["TEST.sample_" + str(i)] = (self.img, self.seg)
    # Initialize Dictionary IO Interface
    io_interface3D = Dictionary_interface(self.dataset3D, classes=3,
                                          three_dim=True)
    # Initialize temporary directory
    self.tmp_dir3D = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
    tmp_batches = os.path.join(self.tmp_dir3D.name, "batches")
    # Initialize Data IO
    self.data_io3D = Data_IO(io_interface3D,
                             input_path=os.path.join(self.tmp_dir3D.name),
                             output_path=os.path.join(self.tmp_dir3D.name),
                             batch_path=tmp_batches, delete_batchDir=False)
    # Initialize Preprocessor
    self.pp3D = Preprocessor(self.data_io3D, batch_size=2,
                             data_aug=None, analysis="fullimage")
    # Get sample list
    self.sample_list3D = self.data_io3D.get_indiceslist()
@classmethod
def setUpClass(self):
    np.random.seed(1234)
    # Create imaging and segmentation data set
    self.dataset = dict()
    for i in range(0, 10):
        img = np.random.rand(16, 16, 16) * 255
        self.img = img.astype(int)
        seg = np.random.rand(16, 16, 16) * 3
        self.seg = seg.astype(int)
        sample = (self.img, self.seg)
        self.dataset["TEST.sample_" + str(i)] = sample
    # Initialize Dictionary IO Interface
    io_interface = Dictionary_interface(self.dataset, classes=3,
                                        three_dim=True)
    # Initialize temporary directory
    self.tmp_dir = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
    tmp_batches = os.path.join(self.tmp_dir.name, "batches")
    # Initialize Data IO
    self.data_io = Data_IO(io_interface, input_path="", output_path="",
                           batch_path=tmp_batches, delete_batchDir=False)
    # Initialize Data Augmentation
    self.data_aug = Data_Augmentation()
    # Get sample list
    self.sample_list = self.data_io.get_indiceslist()
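# A minimal sketch (not part of the original snippet): wiring the
# Data_Augmentation instance above into a Preprocessor, mirroring the other
# fixtures on this page. The run() call and its keywords are assumed from the
# MIScnn Preprocessor API, and the batch size and sample split are illustrative.
def test_preprocessor_with_augmentation_sketch(self):
    pp = Preprocessor(self.data_io, batch_size=2,
                      data_aug=self.data_aug, analysis="fullimage")
    batches = pp.run(self.sample_list[0:4], training=True, validation=False)
    self.assertIsNotNone(batches)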