def test_json_object_array(self):
    # Round-trip an object-dtype array of dicts, JSON-encoded on disk,
    # across a range of chunk sizes.
    for chunks in [1, 2, 5, 10, 100]:
        n = 10
        z = zarr.empty(
            n, dtype=object, object_codec=numcodecs.JSON(), chunks=(chunks,)
        )
        for j in range(n):
            z[j] = {str(k): k for k in range(j)}
        self.filter_warnings_verify_round_trip({"z": z})
def test_empty_string_list(self):
    # A list of empty strings must survive the JSON object codec unchanged.
    z = zarr.empty(1, dtype=object, object_codec=numcodecs.JSON(), chunks=(2,))
    z[0] = ["", ""]
    self.filter_warnings_verify_round_trip({"z": z})
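
The two tests above exercise zarr's object dtype together with the numcodecs JSON codec. The following standalone sketch is not part of the original test suite; it assumes zarr 2.x and numcodecs are importable and shows the same round trip without the test harness:

import numcodecs
import zarr

# Object-dtype array whose elements are serialised to JSON on disk.
z = zarr.empty(3, dtype=object, object_codec=numcodecs.JSON(), chunks=(2,))
for j in range(3):
    z[j] = {str(k): k for k in range(j)}

# Reading back decodes the JSON into equal Python objects.
decoded = z[:]  # numpy object array
assert decoded[0] == {}
assert decoded[2] == {"0": 0, "1": 1}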
def __init__(
    self,
    path=None,
    num_flush_threads=0,
    compressor=DEFAULT_COMPRESSOR,
    chunk_size=1024,
    max_file_size=None,
):
    self._mode = self.BUILD_MODE
    self._num_flush_threads = num_flush_threads
    self._chunk_size = max(1, chunk_size)
    self._metadata_codec = numcodecs.JSON()
    self._compressor = compressor
    # Default to an in-memory group; if a path is given, back the group
    # with an LMDB store instead.
    self.data = zarr.group()
    self.path = path
    if path is not None:
        store = self._new_lmdb_store(max_file_size)
        self.data = zarr.open_group(store=store, mode="w")
    self.data.attrs[FORMAT_NAME_KEY] = self.FORMAT_NAME
    self.data.attrs[FORMAT_VERSION_KEY] = self.FORMAT_VERSION
    self.data.attrs["uuid"] = str(uuid.uuid4())

    chunks = self._chunk_size
    provenances_group = self.data.create_group("provenances")
    provenances_group.create_dataset(
        "timestamp",
        shape=(0,),
        chunks=chunks,
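
When a path is supplied, the constructor swaps the in-memory group for one backed by an LMDB store via self._new_lmdb_store(max_file_size). That helper is not shown in this excerpt; a plausible sketch, assuming zarr 2.x's LMDBStore (which forwards keyword arguments such as map_size and subdir to lmdb.open), could look like this:

def _new_lmdb_store(self, max_file_size):
    # Hypothetical sketch, not necessarily the original implementation:
    # open a single-file LMDB environment at self.path, optionally capping
    # its size via lmdb's map_size.
    kwargs = {"subdir": False}
    if max_file_size is not None:
        kwargs["map_size"] = max_file_size
    return zarr.LMDBStore(self.path, **kwargs)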