Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
"_source": self._source,
"_size": self._size,
"_initial_version": self._initial_version,
"tree": self._generate_tree()
}
# ensure that there are no ndarray proxies which cannot be pickled
queue = [(None, None, state["tree"])]
while queue:
parent, key, element = queue.pop()
if isinstance(element, dict):
for key, val in element.items():
queue.append((element, key, val))
elif isinstance(element, (list, tuple)):
for i, child in enumerate(element):
queue.append((element, i, child))
elif isinstance(element, asdf.tags.core.ndarray.NDArrayType):
parent[key] = element.__array__()
return state
self._serial_write(fd, pad_blocks, include_block_index)
fd.truncate()
return
# Estimate how big the tree will be on disk by writing the
# YAML out in memory. Since the block indices aren't yet
# known, we have to count the number of block references and
# add enough space to accommodate the largest block number
# possible there.
tree_serialized = io.BytesIO()
self._write_tree(self._tree, tree_serialized, pad_blocks=False)
array_ref_count = [0]
from .tags.core.ndarray import NDArrayType
for node in treeutil.iter_tree(self._tree):
if (isinstance(node, (np.ndarray, NDArrayType)) and
self.blocks[node].array_storage == 'internal'):
array_ref_count[0] += 1
serialized_tree_size = (
tree_serialized.tell() +
constants.MAX_BLOCKS_DIGITS * array_ref_count[0])
if not block.calculate_updated_layout(
self.blocks, serialized_tree_size,
pad_blocks, fd.block_size):
# If we don't have any blocks that are being reused, just
# write out in a serial fashion.
self._serial_write(fd, pad_blocks, include_block_index)
fd.truncate()
return
def assert_equal(cls, old, new):
    """Assert that *old* and *new* round-tripped equivalently.

    The ``meta`` mappings are compared directly; the payloads are then
    coerced to arrays and handed to ``NDArrayType.assert_equal``.
    """
    assert old.meta == new.meta
    old_data = np.array(old)
    new_data = np.array(new)
    NDArrayType.assert_equal(old_data, new_data)
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
from .tags.core import ndarray
class Stream(ndarray.NDArrayType):
"""
Used to put a streamed array into the tree.
Examples
--------
Save a double-precision array with 1024 columns, one row at a
time::
>>> from asdf import AsdfFile, Stream
>>> import numpy as np
>>> ff = AsdfFile()
>>> ff.tree['streamed'] = Stream([1024], np.float64)
>>> with open('test.asdf', 'wb') as fd:
... ff.write_to(fd)
... for i in range(200):
... nbytes = fd.write(
"Mismatch in number of columns: "
"Expected {0}, got {1}".format(
len(datatype), len(in_datatype)))
for i in range(len(np_datatype.fields)):
in_type = np_in_datatype[i]
out_type = np_datatype[i]
if not np.can_cast(in_type, out_type, 'safe'):
yield ValidationError(
"Can not safely cast to expected datatype: "
"Expected {0}, got {1}".format(
numpy_dtype_to_asdf_datatype(out_type)[0],
numpy_dtype_to_asdf_datatype(in_type)[0]))
# Register the ndarray-specific schema properties on NDArrayType so that
# schema validation dispatches each property to its validator callable.
NDArrayType.validators = dict(
    ndim=validate_ndim,
    max_ndim=validate_max_ndim,
    datatype=validate_datatype,
)
def assert_equal(cls, old, new):
    """Assert that *old* and *new* carry matching metadata and data.

    Checks ``meta``, ``description`` and ``unit`` for equality, then
    defers to ``NDArrayType.assert_equal`` for the array contents.
    """
    from .ndarray import NDArrayType

    # Compare the lightweight attributes before touching the data.
    for attr in ("meta", "description", "unit"):
        assert getattr(old, attr) == getattr(new, attr)
    NDArrayType.assert_equal(np.array(old), np.array(new))
def find_or_create_block_for_array(self, arr, ctx):
    """Return the block backing *arr*, reusing a FITS HDU when possible.

    A plain ndarray whose base buffer is the base of some HDU's data is
    served straight from that HDU as a ``_FitsBlock``; everything else is
    delegated to the base-class block manager.
    """
    from .tags.core import ndarray

    if isinstance(arr, ndarray.NDArrayType):
        return super(
            _EmbeddedBlockManager, self).find_or_create_block_for_array(arr, ctx)

    base = util.get_array_base(arr)
    for hdu in self._hdulist:
        # HDUs without data can't back an array; comparing bases lets
        # views into an HDU's data match as well.
        if hdu.data is not None and base is util.get_array_base(hdu.data):
            return _FitsBlock(hdu)

    return super(
        _EmbeddedBlockManager, self).find_or_create_block_for_array(arr, ctx)
def find_or_create_block_for_array(self, arr, ctx):
    """Return the block backing *arr*, reusing a FITS HDU when possible.

    Bug fix: the previous version compared ``base is hdu.data`` directly,
    which never matched when *arr* was a view into an HDU's data (the
    view's base buffer is not the same object as ``hdu.data`` itself),
    and it evaluated the comparison even for HDUs whose ``data`` is None.
    Both are handled here, matching the sibling implementation in this
    file that compares array *bases* and skips empty HDUs.
    """
    from .tags.core import ndarray

    if not isinstance(arr, ndarray.NDArrayType):
        base = util.get_array_base(arr)
        for hdu in self._hdulist:
            if hdu.data is None:
                # HDU holds no array; nothing to share.
                continue
            if base is util.get_array_base(hdu.data):
                return _FitsBlock(hdu)

    return super(
        _EmbeddedBlockManager, self).find_or_create_block_for_array(arr, ctx)
def assert_equal(cls, old, new):
    """Assert that *old* and *new* are equivalent tables.

    Metadata is compared first; the data comparison is attempted on the
    whole object and, failing that, column by column, falling back to
    plain equality when a column can't be coerced to an array.
    """
    def _arrays_match(a, b):
        # Coerce both sides and delegate the element-wise comparison.
        NDArrayType.assert_equal(np.array(a), np.array(b))

    assert old.meta == new.meta
    try:
        _arrays_match(old, new)
    except (AttributeError, TypeError, ValueError):
        for left, right in zip(old, new):
            try:
                _arrays_match(left, right)
            except (AttributeError, TypeError, ValueError):
                assert left == right
def assert_equal(cls, old, new):
    """Assert that *old* and *new* are equivalent tables.

    After checking ``meta``, tries a single whole-table array comparison;
    if the objects can't be compared that way, each column pair is
    compared individually, defaulting to ``==`` for non-array columns.
    """
    assert old.meta == new.meta
    try:
        # Fast path: compare everything in one shot.
        NDArrayType.assert_equal(np.array(old), np.array(new))
    except (AttributeError, TypeError, ValueError):
        # Slow path: walk the columns in lockstep.
        for old_col, new_col in zip(old, new):
            try:
                NDArrayType.assert_equal(np.array(old_col), np.array(new_col))
            except (AttributeError, TypeError, ValueError):
                assert old_col == new_col