def _read_bucket(self, doc, column_set, column_dtypes, include_symbol, include_images, columns):
    rtn = {}
    if doc[VERSION] != 3:
        raise ArcticException("Unhandled document version: %s" % doc[VERSION])
    # np.cumsum copies the read-only array created with frombuffer
    rtn[INDEX] = np.cumsum(np.frombuffer(lz4_decompress(doc[INDEX]), dtype='uint64'))
    doc_length = len(rtn[INDEX])
    column_set.update(doc[COLUMNS].keys())
    # get the mask for the columns we're about to load
    union_mask = np.zeros((doc_length + 7) // 8, dtype='uint8')
    for c in column_set:
        try:
            coldata = doc[COLUMNS][c]
            # the or below will make a copy of this read-only array
            mask = np.frombuffer(lz4_decompress(coldata[ROWMASK]), dtype='uint8')
            union_mask = union_mask | mask
        except KeyError:
            rtn[c] = None
    union_mask = np.unpackbits(union_mask)[:doc_length].astype('bool')
    rtn_length = int(np.sum(union_mask))
    rtn[INDEX] = rtn[INDEX][union_mask]
    if include_symbol:
        rtn['SYMBOL'] = [doc[SYMBOL], ] * rtn_length
    # Unpack each requested column in turn
    for c in column_set:
        try:
            coldata = doc[COLUMNS][c]
            dtype = np.dtype(coldata[DTYPE])
            # values ends up being copied by pandas before being returned to the user. However, we
            # still need a writable target array, so allocate one and fill only the masked rows.
            rtn[c] = self._empty(rtn_length, dtype=column_dtypes.get(c, dtype))
            rowmask = np.unpackbits(np.frombuffer(lz4_decompress(coldata[ROWMASK]),
                                                  dtype='uint8'))[:doc_length].astype('bool')
            values = np.frombuffer(lz4_decompress(coldata[DATA]), dtype=dtype)
            rtn[c][rowmask[union_mask]] = values
        except KeyError:
            rtn[c] = None
    return rtn
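The bucket format above stores every row mask as an lz4-compressed np.packbits bitmap. A minimal, self-contained sketch of that round trip (plain numpy and lz4.block, no Arctic internals assumed):

import numpy as np
from lz4.block import compress, decompress

doc_length = 10
mask = np.array([1, 0, 1, 1, 0, 0, 0, 1, 1, 0], dtype='uint8')
stored = compress(np.packbits(mask).tobytes())  # what a bucket would hold
# unpackbits yields a multiple of 8 bits, so truncate to the row count
recovered = np.unpackbits(np.frombuffer(decompress(stored),
                                        dtype='uint8'))[:doc_length].astype('bool')
assert (recovered == mask.astype('bool')).all()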
def roundtrip(x, c_kwargs, d_kwargs, dictionary):
    # Compress then decompress x, optionally with a shared dictionary;
    # get_stored_size (defined alongside this helper in the test suite) reads
    # the little-endian size header that compress() writes when store_size=True.
    if dictionary:
        if isinstance(dictionary, tuple):
            d = x[dictionary[0]:dictionary[1]]
        else:
            d = dictionary
        c_kwargs['dict'] = d
        d_kwargs['dict'] = d
    c = lz4.block.compress(x, **c_kwargs)
    if c_kwargs['store_size']:
        assert get_stored_size(c) == len(x)
    else:
        d_kwargs['uncompressed_size'] = len(x)
    return lz4.block.decompress(c, **d_kwargs)
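A usage sketch for the helper above (argument values chosen here for illustration): with store_size=False the size header is omitted, so the helper forwards uncompressed_size to lz4.block.decompress, and a shared dictionary must be handed to both sides.

data = b'repetitive payload ' * 64
assert roundtrip(data, {'store_size': False}, {}, None) == data
# dictionary compression: the same dict bytes go to compress and decompress
assert roundtrip(data, {'store_size': False}, {}, b'repetitive payload ') == data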
import pytest
import lz4.block

def test_decompress_without_leak():
    # Verify that a hand-crafted packet does not leak uninitialized(?) memory.
    data = lz4.block.compress(b'A' * 64)
    message = r'^Decompressor wrote 64 bytes, but 79 bytes expected from header$'
    with pytest.raises(lz4.block.LZ4BlockError, match=message):
        lz4.block.decompress(b'\x4f' + data[1:])
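The forged packet works because compress() with the default store_size=True prefixes the block with the original length as a little-endian uint32. Overwriting the first byte with 0x4f (79) makes the header promise more output than the block decodes to, which must raise instead of returning stray memory. A quick look at that header:

import struct
import lz4.block

data = lz4.block.compress(b'A' * 64)       # store_size=True by default
(claimed,) = struct.unpack('<I', data[:4])
assert claimed == 64                       # the header records the original size
assert b'\x4f'[0] == 79                    # the forged byte claims 79 bytes instead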
def _get_status(r):
    queues = []
    for key in r.keys("queues~*~*~*"):
        _, owner, repo, branch = key.split("~")
        updated_at = None
        pulls = []
        payload = r.get(key)
        if payload:
            try:
                pulls = ujson.loads(payload)
            except Exception:
                # Old format: the payload was lz4-compressed before being stored
                payload = lz4.block.decompress(payload)
                pulls = ujson.loads(payload)
            updated_at = sorted(p["updated_at"] for p in pulls)[-1]
        queues.append({
            "owner": owner,
            "repo": repo,
            "branch": branch,
            "pulls": pulls,
            "updated_at": updated_at,
        })
    return ujson.dumps(queues)
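The try/except gives a transparent migration path: new entries are stored as plain JSON, old ones were lz4-compressed first, and the reader accepts both. The same pattern in isolation, using the standard json module for a self-contained check:

import json
import lz4.block

def load_payload(payload):
    try:
        return json.loads(payload)
    except (ValueError, UnicodeDecodeError):
        # Old format: lz4-compressed JSON
        return json.loads(lz4.block.decompress(payload))

new_style = json.dumps([{"updated_at": "2021-01-01T00:00:00Z"}]).encode()
old_style = lz4.block.compress(new_style)
assert load_payload(new_style) == load_payload(old_style)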
for dt_type in frame:
    dataset = frame[dt_type]
    img = dataset[...]
    if dt_type == "camera":
        # Rebuild the 4x4 extrinsic matrix from the flat parameter vector
        camera = {
            "extrinsic": [[img[5], img[8], img[11], img[14]],
                          [img[6], img[9], img[12], img[15]],
                          [img[7], img[10], img[13], img[16]],
                          [0.0, 0.0, 0.0, 1.0]],
            "f_x": img[0],
            "f_y": img[1],
            "c_x": img[3],
            "c_y": img[4],
        }
        with open(os.path.join(path, dataset_name, "{:04d}".format(seq_idx), "poses", "{:04d}.json".format(f_idx)), "w") as output_file:
            json.dump(camera, output_file)
    elif dt_type == "depth":
        # float16 depth: the blob has no size header, so pass width * height * 2 bytes
        dimension = dataset.attrs["extents"]
        depth = np.frombuffer(decompress(img.tobytes(), dimension[0] * dimension[1] * 2),
                              dtype=np.float16).astype(np.float32)
        depth = depth.reshape(dimension[0], dimension[1])
        imageio.imwrite(os.path.join(path, dataset_name, "{:04d}".format(seq_idx), "depths", "{:04d}.exr".format(f_idx)), depth, flags=freeimage.IO_FLAGS.EXR_ZIP)
    elif dt_type == "image":
        img = imageio.imread(img.tobytes(), format="RAW-FI")
        imageio.imwrite(os.path.join(path, dataset_name, "{:04d}".format(seq_idx), "images", "{:04d}.png".format(f_idx)), img)

with open(os.path.join(path, dataset_name, "num_images.json"), "w") as output_file:
    json.dump(num_images, output_file)
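The depth branch depends on an explicit uncompressed_size because the stored blob carries no size header: a w×h float16 image occupies exactly w * h * 2 bytes. A round-trip sketch of that arithmetic:

import numpy as np
import lz4.block

h, w = 4, 6
depth = np.random.rand(h, w).astype(np.float16)
blob = lz4.block.compress(depth.tobytes(), store_size=False)  # headerless, like the dataset
restored = np.frombuffer(lz4.block.decompress(blob, uncompressed_size=h * w * 2),
                         dtype=np.float16).astype(np.float32).reshape(h, w)
assert restored.shape == (h, w)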
elif trigger_type & botengine.TRIGGER_DATA_REQUEST != 0:
    # Data request response: 'data' is the list of download descriptors
    # delivered with this trigger.
    imported = False
    try:
        import lz4.block
        imported = True
    except ImportError:
        botengine.get_logger().error("Attempted to import 'lz4' to uncompress the data request response, but lz4 is not available. Please add 'lz4' to 'pip_install_remotely' in your structure.json.")

    if imported:
        events = {}
        for d in data:
            reference = None
            if 'key' in d:
                reference = d['key']
            if reference not in events:
                events[reference] = {}
            botengine.get_logger().info("Downloading {} ({} bytes)...".format(d['deviceId'], d['compressedLength']))
            r = botengine._requests.get(d['url'], timeout=60, stream=True)
            # The stream has no embedded size header, so pass the server-reported length
            events[reference][controller.get_device(d['deviceId'])] = lz4.block.decompress(r.content, uncompressed_size=d['dataLength'])
        for reference in events:
            controller.data_request_ready(botengine, reference, events[reference])
        # DO NOT SAVE CORE VARIABLES HERE.
        return

else:
    botengine.get_logger().error("bot.py: Unknown trigger {}".format(trigger_type))

# Always save your variables!
botengine.save_variable("controller", controller, required_for_each_execution=True)
botengine.get_logger().info("<< bot")
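Each downloaded stream is decompressed against the server-reported dataLength for the same reason: the blob itself has no size header. A minimal sketch of that contract, with the descriptor field names taken from the snippet above:

import lz4.block

raw = b'device,timestamp,value\n' * 100
compressed = lz4.block.compress(raw, store_size=False)
d = {'dataLength': len(raw),               # uncompressed size, reported by the server
     'compressedLength': len(compressed)}
assert lz4.block.decompress(compressed, uncompressed_size=d['dataLength']) == raw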
if data_object.object_flags & self._OBJECT_COMPRESSED_FLAG_XZ:
    data = lzma.decompress(data)

elif data_object.object_flags & self._OBJECT_COMPRESSED_FLAG_LZ4:
    uncompressed_size_map = self._GetDataTypeMap('uint32le')

    try:
        uncompressed_size = self._ReadStructureFromByteStream(
            data, file_offset + 64, uncompressed_size_map)
    except (ValueError, errors.ParseError) as exception:
        raise errors.ParseError((
            'Unable to parse LZ4 uncompressed size at offset: 0x{0:08x} with '
            'error: {1!s}').format(file_offset + 64, exception))

    # Skip the 8-byte preamble; the block itself carries no size header.
    data = lz4.block.decompress(
        data[8:], uncompressed_size=uncompressed_size)

return data
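The parser treats the record as an 8-byte preamble followed by a raw LZ4 block, with the uncompressed size held in a separate uint32le field rather than in the block. A sketch of writing and reading that shape; the preamble contents here are an assumption for illustration, only the 8-byte offset comes from the snippet:

import struct
import lz4.block

payload = b'event record ' * 32
block = lz4.block.compress(payload, store_size=False)
preamble = struct.pack('<II', 0, len(block))   # hypothetical preamble contents
record = preamble + block
uncompressed_size = len(payload)               # stored out of band as uint32le

assert lz4.block.decompress(record[8:],
                            uncompressed_size=uncompressed_size) == payload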
from typing import Optional, Tuple

import numpy as np
from lz4.block import decompress

def lz4_string_to_array(string, dtype,
                        shape: Optional[Tuple[int]] = None):
    """
    Converts an lz4-compressed string back to a numpy array.
    Args:
        string: Serialized array.
        dtype: Data type of the original array.
        shape: Shape of the original array.
    Returns:
        N-dimensional numpy array.
    """
    if len(string) == 0:
        return np.zeros((0,), dtype=dtype)
    try:
        arr_1d = np.frombuffer(decompress(string), dtype=dtype)
    except TypeError:  # python3 compatibility
        arr_1d = np.frombuffer(decompress(str.encode(string)), dtype=dtype)
    if shape is not None:
        arr_1d = arr_1d.reshape(shape)
    return arr_1d
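A matching serializer (hypothetical, not part of the snippet) just flattens the array and compresses it; the pair round-trips as long as dtype and shape travel out of band:

import numpy as np
from lz4.block import compress

def array_to_lz4_string(arr):
    # Hypothetical inverse of the reader above
    return compress(arr.tobytes())

original = np.arange(12, dtype=np.float32).reshape(3, 4)
restored = lz4_string_to_array(array_to_lz4_string(original),
                               dtype=np.float32, shape=(3, 4))
assert np.array_equal(restored, original)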
def CreateFromNetworkBytes( network_string ):
    try:
        obj_bytes = zlib.decompress( network_string )
    except zlib.error:
        if LZ4_OK:
            # payload is not zlib; fall back to lz4 if available
            obj_bytes = lz4.block.decompress( network_string )
        else:
            raise
    obj_string = str( obj_bytes, 'utf-8' )
    return CreateFromString( obj_string )
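The same fallback idiom as earlier, keyed on the compressor rather than the format: try zlib first and fall back to lz4 for older payloads. A self-contained check of both paths (the helper name here is illustrative):

import zlib
import lz4.block

def decompress_either(blob):
    try:
        return zlib.decompress(blob)
    except zlib.error:
        return lz4.block.decompress(blob)

obj = '{"name": "example"}'.encode('utf-8')
assert decompress_either(zlib.compress(obj)) == obj
assert decompress_either(lz4.block.compress(obj)) == obj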
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE
# OR PERFORMANCE OF THIS SOFTWARE.
# nxo64.py: IDA loader (and library for reading nso/nro files)
from __future__ import print_function
import gzip, math, os, re, struct, sys
from struct import unpack as up, pack as pk
from io import BytesIO
import lz4.block
uncompress = lz4.block.decompress
if sys.version_info[0] == 3:
    iter_range = range
    int_types = (int,)
    ascii_string = lambda b: b.decode('ascii')
    bytes_to_list = lambda b: list(b)
    list_to_bytes = lambda l: bytes(l)
else:
    iter_range = xrange
    int_types = (int, long)
    ascii_string = lambda b: str(b)
    bytes_to_list = lambda b: map(ord, b)
    list_to_bytes = lambda l: ''.join(map(chr, l))
def kip1_blz_decompress(compressed):
    # The KIP1 BLZ footer packs three little-endian uint32s into the last 12 bytes
    compressed_size, init_index, uncompressed_addl_size = struct.unpack('<III', compressed[-0xC:])