import pytest
from csbdeep.models import CARE

# 'config' and 'tmpdir' come from the enclosing test: tmpdir is the pytest fixture,
# config is a CARE configuration object (possibly invalid).
def _build():
    # no config given and no saved config to load -> config file not found
    with pytest.raises(FileNotFoundError):
        CARE(None, basedir=str(tmpdir))
    CARE(config, name='model', basedir=None)
    # neither config nor basedir -> invalid
    with pytest.raises(ValueError):
        CARE(None, basedir=None)
    CARE(config, basedir=str(tmpdir)).export_TF()
    # instantiating the same named model again warns that files may be overwritten
    with pytest.warns(UserWarning):
        CARE(config, name='model', basedir=str(tmpdir))
        CARE(config, name='model', basedir=str(tmpdir))
    # config=None loads the config saved above from basedir/name
    CARE(None, name='model', basedir=str(tmpdir))

if config.is_valid():
    _build()
else:
    # assumed: building a model from an invalid config should raise ValueError
    with pytest.raises(ValueError):
        _build()
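For context, the config object used above would typically be a csbdeep Config instance; a minimal sketch (the 'YX' axes value is only illustrative):

from csbdeep.models import Config

# small 2D configuration; an invalid config would make _build() raise ValueError instead
config = Config('YX')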
def _set_logdir(self):
    self.logdir = self.basedir / self.name
    config_file = self.logdir / 'config.json'
    if self.config is None:
        # no config given: try to restore it from a previously saved config.json
        if config_file.exists():
            config_dict = load_json(str(config_file))
            self.config = self._config_class(**config_dict)
            if not self.config.is_valid():
                invalid_attr = self.config.is_valid(True)[1]
                raise ValueError('Invalid attributes in loaded config: ' + ', '.join(invalid_attr))
        else:
            raise FileNotFoundError("config file doesn't exist: %s" % str(config_file.resolve()))
    else:
        # config given: create the model directory and persist the config for later reuse
        if self.logdir.exists():
            warnings.warn('output path for model already exists, files may be overwritten: %s' % str(self.logdir.resolve()))
        self.logdir.mkdir(parents=True, exist_ok=True)
        save_json(vars(self.config), str(config_file))
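The load_json and save_json helpers used by _set_logdir are not part of this excerpt; a minimal sketch, assuming they are thin wrappers around the standard json module:

import json

def load_json(fpath):
    # read a JSON file into a dict
    with open(fpath, 'r') as f:
        return json.load(f)

def save_json(data, fpath, **kwargs):
    # write a dict to a JSON file
    with open(fpath, 'w') as f:
        f.write(json.dumps(data, **kwargs))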
├── source1
│ ├── imageA.tif
│ └── imageB.tif
└── source2
├── imageA.tif
└── imageC.tif
>>> data = RawData.from_folder(basepath='data', source_dirs=['source1','source2'], target_dir='GT', axes='YX')
>>> n_images = data.size
>>> for source_x, target_y, axes, mask in data.generator():
... pass
"""
p = Path(basepath)
pairs = [(f, p/target_dir/f.name) for f in chain(*((p/source_dir).glob(pattern) for source_dir in source_dirs))]
# expression-style assertions: the right-hand side raises if the left-hand side is falsy
len(pairs) > 0 or _raise(FileNotFoundError("Didn't find any images."))
consume(t.exists() or _raise(FileNotFoundError(t)) for s,t in pairs)
axes = axes_check_and_normalize(axes)
n_images = len(pairs)
description = "{p}: target='{o}', sources={s}, axes='{a}', pattern='{pt}'".format(p=basepath, s=list(source_dirs),
                                                                                  o=target_dir, a=axes, pt=pattern)

def _gen():
    # lazily load each (source, target) image pair and report the axes of the trailing dimensions
    for fx, fy in pairs:
        x, y = imread(str(fx)), imread(str(fy))
        len(axes) >= x.ndim or _raise(ValueError())
        yield x, y, axes[-x.ndim:], None

return RawData(_gen, n_images, description)
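The _raise and consume utilities assumed by the code above are likewise not shown here; a plausible minimal implementation, based only on how they are used in this snippet:

from collections import deque

def _raise(e):
    # allows raising an exception from inside an expression (used with 'or' above)
    raise e

def consume(iterator):
    # exhaust an iterator purely for its side effects (here: the file-existence checks)
    deque(iterator, maxlen=0)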
value_manipulation=manipulator)
history = self.keras_model.fit_generator(generator=training_data, validation_data=(validation_X, validation_Y),
                                         epochs=epochs, steps_per_epoch=steps_per_epoch,
                                         callbacks=self.callbacks, verbose=1)

if self.basedir is not None:
    self.keras_model.save_weights(str(self.logdir / 'weights_last.h5'))

    if self.config.train_checkpoint is not None:
        print()
        self._find_and_load_weights(self.config.train_checkpoint)
        try:
            # remove temporary weights
            (self.logdir / 'weights_now.h5').unlink()
        except FileNotFoundError:
            pass

return history
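Once train() has finished, the weights written to weights_last.h5 can be restored into a new model instance later on; a minimal usage sketch, assuming the same CARE class and directory layout as above (the 'models' basedir name is hypothetical):

from csbdeep.models import CARE

# config=None makes the model load config.json from basedir/name (see _set_logdir above)
model = CARE(None, name='model', basedir='models')
model.keras_model.load_weights(str(model.logdir / 'weights_last.h5'))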