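# A minimal sketch of the imports these tests appear to rely on. The module
# paths are assumptions based on the hdmf package layout; older releases
# shipped the same classes under pynwb.form, so adjust to match the package
# under test. Bar and Foo are test Container classes defined by the suite's
# fixtures and are not shown here.
import unittest  # noqa: F401 -- the methods below are unittest.TestCase tests
import warnings

import h5py  # noqa: F401 -- self.f is used like an h5py.File handle
import numpy as np

from hdmf.build import GroupBuilder, DatasetBuilder  # assumed path
from hdmf.backends.hdf5 import HDF5IO, H5DataIO      # assumed path; HDF5IO backs self.io
from hdmf.data_utils import DataChunkIterator        # assumed path
from hdmf.validate.errors import *  # noqa: F403 -- assumed source of MissingDataType
from six import text_type as text   # assumption: alias matching the text(...) call below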
def test_build(self):
''' Test default mapping functionality when no attributes are nested '''
container = Bar('my_bar', list(range(10)), 'value1', 10)
builder = self.mapper.build(container, self.manager)
expected = GroupBuilder('my_bar', datasets={'data': DatasetBuilder('data', list(range(10)))},
attributes={'attr1': 'value1', 'attr2': 10})
self.assertDictEqual(builder, expected)
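    # test_build assumes a fixture along these lines (a sketch only; the spec
    # constructor arguments are assumptions and their order may differ between
    # versions): a Bar test Container holding data/attr1/attr2, a GroupSpec
    # with data_type_def='Bar' that maps the list to a 'data' dataset and the
    # two scalars to attributes, and self.mapper = ObjectMapper(spec) plus a
    # BuildManager as self.manager.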
def test_overwrite(self):
db1 = DatasetBuilder('db1', [1, 2, 3])
db2 = DatasetBuilder('db2', [4, 5, 6])
db1.deep_update(db2)
self.assertListEqual(db1.data, db2.data)
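    # deep_update pulls the other builder's content into this one; the
    # assertion documents that the incoming data replaces the target's data.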
def test_write_dataset_iterable(self):
self.io.write_dataset(self.f, DatasetBuilder('test_dataset', range(10), attributes={}))
dset = self.f['test_dataset']
self.assertListEqual(dset[:].tolist(), list(range(10)))
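    # The write_dataset tests assume HDF5 fixtures roughly like this sketch
    # (the file name and constructor arguments are assumptions):
    #
    #     self.f = h5py.File('test.h5', 'w')     # raw h5py handle for readback
    #     self.io = HDF5IO('test.h5', mode='a')  # hdmf HDF5 backend under test
    #
    # write_dataset(parent, builder) materializes the builder as an HDF5
    # dataset named after it under the given parent group.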
def test_write_table_nested(self):
b_cmpd_dt = np.dtype([('c', np.int32), ('d', np.float64)])
cmpd_dt = np.dtype([('a', np.int32), ('b', b_cmpd_dt)])
data = np.zeros(10, dtype=cmpd_dt)
data['a'][1] = 101
data['b']['c'] = 202
data['b']['d'] = 10.1
b_dt = [{'name': 'c', 'dtype': 'int32', 'doc': 'c column'},
{'name': 'd', 'dtype': 'float64', 'doc': 'd column'}]
dt = [{'name': 'a', 'dtype': 'int32', 'doc': 'a column'},
{'name': 'b', 'dtype': b_dt, 'doc': 'b column'}]
self.io.write_dataset(self.f, DatasetBuilder('test_dataset', data, attributes={}, dtype=dt))
dset = self.f['test_dataset']
self.assertEqual(dset['a'].tolist(), data['a'].tolist())
self.assertEqual(dset['b'].tolist(), data['b'].tolist())
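    # The list-of-dict dtype spec mirrors the numpy compound dtype built at
    # the top of the test: each dict supplies a column name, dtype string, and
    # doc, and nesting a sub-list under 'dtype' (column 'b') yields the nested
    # compound type with sub-columns 'c' and 'd'.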
def test_invalid_wrong_name_req_type(self):
bar_builder = GroupBuilder('bad_bar_name',
attributes={'data_type': 'Bar', 'attr1': 'a string attribute'},
datasets=[DatasetBuilder('data', 100, attributes={'attr2': 10})])
foo_builder = GroupBuilder('my_foo',
attributes={'data_type': 'Foo', 'foo_attr': text('example Foo object')},
groups=[bar_builder])
results = self.vmap.validate(foo_builder)
self.assertEqual(len(results), 1)
self.assertIsInstance(results[0], MissingDataType) # noqa: F405
self.assertEqual(results[0].data_type, 'Bar')
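    # This validator test assumes self.vmap was built from a namespace in
    # which the Foo spec requires an inner Bar group under a specific name
    # (a sketch; the fixture details are assumptions). Because the inner group
    # is named 'bad_bar_name' rather than the required name, the required Bar
    # type is never matched, and validation reports a single MissingDataType
    # error whose data_type is 'Bar'.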
def test_intersecting_datasets(self):
gb1 = GroupBuilder('gb1', datasets={'dataset2': DatasetBuilder('dataset2', [1, 2, 3])})
gb2 = GroupBuilder('gb2', datasets={'dataset2': DatasetBuilder('dataset2', [4, 5, 6])})
gb1.deep_update(gb2)
self.assertIn('dataset2', gb1)
self.assertListEqual(gb1['dataset2'].data, gb2['dataset2'].data)
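    # GroupBuilder behaves like a mapping (hence the 'in' check here and the
    # assertDictEqual in test_build): when both groups contain a dataset under
    # the same key, deep_update merges them and the incoming data wins.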
def test_write_dataset_iterable_multidimensional_array_compression(self):
a = np.arange(30).reshape(5, 2, 3)
aiter = iter(a)
daiter = DataChunkIterator.from_iterable(aiter, buffer_size=2)
wrapped_daiter = H5DataIO(data=daiter,
compression='gzip',
compression_opts=5,
shuffle=True,
fletcher32=True)
self.io.write_dataset(self.f, DatasetBuilder('test_dataset', wrapped_daiter, attributes={}))
dset = self.f['test_dataset']
self.assertEqual(dset.shape, a.shape)
self.assertListEqual(dset[:].tolist(), a.tolist())
self.assertEqual(dset.compression, 'gzip')
self.assertEqual(dset.compression_opts, 5)
        self.assertTrue(dset.shuffle)
        self.assertTrue(dset.fletcher32)
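    # Wrapping a DataChunkIterator in H5DataIO combines streamed, buffered
    # writes with HDF5 filter settings: the data arrives two chunks at a time,
    # yet the finished dataset matches the full array and carries the
    # requested gzip/shuffle/fletcher32 options.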
def test_no_overwrite(self):
db1 = DatasetBuilder('db1', [1, 2, 3])
db2 = DatasetBuilder('db2', [4, 5, 6], attributes={'attr1': 'va1'})
db1.deep_update(db2)
self.assertListEqual(db1.data, db2.data)
self.assertIn('attr1', db1.attributes)
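    # The name notwithstanding, deep_update still replaces the data here as
    # well; the extra assertion shows that db2's attributes are merged into
    # db1 alongside the data.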
def test_write_dataset_list_disable_default_compress(self):
with warnings.catch_warnings(record=True) as w:
a = H5DataIO(np.arange(30).reshape(5, 2, 3),
compression=False,
compression_opts=5)
self.assertEqual(len(w), 1) # We expect a warning that compression options are being ignored
            self.assertNotIn('compression_opts', a.io_settings)
            self.assertNotIn('compression', a.io_settings)
self.io.write_dataset(self.f, DatasetBuilder('test_dataset', a, attributes={}))
dset = self.f['test_dataset']
self.assertTrue(np.all(dset[:] == a.data))
        self.assertIsNone(dset.compression)
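    # Usage note: compression=False explicitly disables the filter, which
    # makes the accompanying compression_opts meaningless -- hence the single
    # expected warning and the removal of both keys from io_settings. A
    # minimal call needs only the flag (sketch):
    #
    #     a = H5DataIO(np.arange(30).reshape(5, 2, 3), compression=False)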
def test_pass_through_of_recommended_chunks(self):
class DC(DataChunkIterator):
def recommended_chunk_shape(self):
return (5, 1, 1)
dci = DC(data=np.arange(30).reshape(5, 2, 3))
wrapped_dci = H5DataIO(data=dci,
compression='gzip',
compression_opts=5,
shuffle=True,
fletcher32=True)
self.io.write_dataset(self.f, DatasetBuilder('test_dataset', wrapped_dci, attributes={}))
dset = self.f['test_dataset']
self.assertEqual(dset.chunks, (5, 1, 1))
self.assertEqual(dset.compression, 'gzip')
self.assertEqual(dset.compression_opts, 5)
        self.assertTrue(dset.shuffle)
        self.assertTrue(dset.fletcher32)
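    # Subclassing DataChunkIterator to override recommended_chunk_shape lets a
    # data source steer HDF5 chunking: the writer adopts (5, 1, 1) as the
    # dataset's chunk shape, which the test confirms via dset.chunks.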