@tds_data_type(8, np.uint64)
class Uint64(StructType):
    size = 8
    struct_declaration = "Q"

@tds_data_type(9, np.single)
class SingleFloat(StructType):
    size = 4
    struct_declaration = "f"

@tds_data_type(10, np.double)
class DoubleFloat(StructType):
    size = 8
    struct_declaration = "d"

@tds_data_type(11, None)
class ExtendedFloat(TdmsType):
    pass

@tds_data_type(0x19, np.single, set_np_type=False)
class SingleFloatWithUnit(StructType):
    size = 4
    struct_declaration = "f"

@tds_data_type(0x1A, np.double, set_np_type=False)
class DoubleFloatWithUnit(StructType):
    size = 8
    struct_declaration = "d"

@tds_data_type(0x1B, None)
class ExtendedFloatWithUnit(TdmsType):
    pass

@tds_data_type(0x20, None)
class String(TdmsType):
    def __init__(self, value):
        self.value = value
        content = value.encode('utf-8')
        # Assumption: the length prefix is packed as a little-endian uint32
        # before the UTF-8 content, matching the TDMS string layout.
        length = _struct_pack('<L', len(content))
        self.bytes = length + content
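The tds_data_type decorator itself does not appear in these snippets, but the pattern above implies a registry keyed by the TDMS type code (and optionally by numpy dtype). Below is a minimal sketch of such a decorator, assuming the dictionary name tds_data_types (referenced later via types.tds_data_types) plus a hypothetical numpy_data_types mapping; the real npTDMS implementation may differ.

# Sketch only: attribute and dictionary names other than tds_data_types are
# assumptions made for illustration.
tds_data_types = {}
numpy_data_types = {}

def tds_data_type(enum_value, np_type, set_np_type=True):
    def decorator(cls):
        cls.enum_value = enum_value       # TDMS type code, e.g. 0x20 for strings
        cls.nptype = np_type              # matching numpy dtype, or None
        tds_data_types[enum_value] = cls
        if set_np_type and np_type is not None:
            numpy_data_types[np_type] = cls
        return cls
    return decorator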
struct_declaration = "f"
@tds_data_type(0x1A, np.double, set_np_type=False)
class DoubleFloatWithUnit(StructType):
size = 8
struct_declaration = "d"
@tds_data_type(0x1B, None)
class ExtendedFloatWithUnit(TdmsType):
pass
@tds_data_type(0x20, None)
class String(TdmsType):
def __init__(self, value):
self.value = value
content = value.encode('utf-8')
length = _struct_pack('
"""
if not os.path.isfile(path):
raise FileNotFoundError("file not found: %s" % path)
if isinstance(names, str):
names = [names]
elif type(names) in (list, tuple):
pass
elif names is None:
# if names not specified, get all names
names = read_names(path)
else:
raise TypeError("`names` must be str/list/tuple, got: %s" % type(names))
arrays = []
with TdmsFile.open(path) as f:
# memory efficient but maybe slower
for name in names:
# assuming an object hierarchy with depth 2 'group-channel'
assert len(name.split('\\')) == 2, f"Unable to parse group name and channel name from {name}."
group_name, channel_name = name.split('\\')
group = f[group_name]
channel = group[channel_name]
data = channel[:]
time = None
try:
# try to fetch time track defined by wf_start_time and wf_start_offset attributes
time = channel.time_track()
except KeyError:
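For reference, a hedged usage sketch of the same nptdms reading pattern; the file path, group name, and channel name below are placeholders.

from nptdms import TdmsFile

with TdmsFile.open("example.tdms") as tdms_file:    # placeholder path
    channel = tdms_file["my_group"]["my_channel"]   # placeholder names
    data = channel[:]                 # read only this channel's data
    try:
        # time_track() raises KeyError if the waveform timing properties are absent
        time = channel.time_track()
    except KeyError:
        time = None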
    def read_raw_data_index(self, f, raw_data_index_header):
        if raw_data_index_header not in (FORMAT_CHANGING_SCALER, DIGITAL_LINE_SCALER):
            raise ValueError(
                "Unexpected raw data index for DAQmx data: 0x%08X" %
                raw_data_index_header)
        # This is a DAQmx raw data segment.
        # 0x00001269 for segment containing Format Changing scaler.
        # 0x0000126A for segment containing Digital Line scaler.
        # Note that the NI docs on the TDMS format state that digital line scaler data
        # has 0x00001369, which appears to be incorrect

        # Read the data type
        data_type_val = types.Uint32.read(f, self.endianness)
        try:
            self.data_type = types.tds_data_types[data_type_val]
        except KeyError:
            raise KeyError("Unrecognised data type: %s" % data_type_val)

        daqmx_metadata = DaqMxMetadata(f, self.endianness, raw_data_index_header)
        log.debug("DAQmx metadata: %r", daqmx_metadata)

        self.data_type = daqmx_metadata.data_type
        # DAQmx format has special chunking
        self.data_size = daqmx_metadata.chunk_size * sum(daqmx_metadata.raw_data_widths)
        self.number_values = daqmx_metadata.chunk_size
        self.daqmx_metadata = daqmx_metadata
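A worked example of the chunking arithmetic above, with made-up numbers: a chunk of 1000 values where two acquisition cards write interleaved data 4 and 2 bytes wide occupies 1000 * (4 + 2) = 6000 bytes.

chunk_size = 1000                  # hypothetical number of values per chunk
raw_data_widths = [4, 2]           # hypothetical per-card widths in bytes
data_size = chunk_size * sum(raw_data_widths)
print(data_size)                   # 6000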
def read_property(f, endianness="<"):
    """ Read a property from a segment's metadata """
    prop_name = types.String.read(f, endianness)
    prop_data_type = types.tds_data_types[types.Uint32.read(f, endianness)]
    value = prop_data_type.read(f, endianness)
    log.debug("Property '%s' = %r", prop_name, value)
    return prop_name, value
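read_property depends on npTDMS internals, but the byte layout it consumes can be illustrated with the standard library alone: a length-prefixed UTF-8 property name, a uint32 TDMS type code, then the raw value. The property name and value below are made up, and 3 is the TDMS code for a signed 32-bit integer.

import io
import struct

def _length_prefixed(text):
    # TDMS strings: little-endian uint32 length followed by UTF-8 bytes
    data = text.encode("utf-8")
    return struct.pack("<L", len(data)) + data

buf = io.BytesIO(
    _length_prefixed("my_property")   # property name (made up)
    + struct.pack("<L", 3)            # type code 3 = Int32
    + struct.pack("<l", 42))          # property value (made up)

name_length = struct.unpack("<L", buf.read(4))[0]
name = buf.read(name_length).decode("utf-8")
type_code = struct.unpack("<L", buf.read(4))[0]
value = struct.unpack("<l", buf.read(4))[0]
print(name, type_code, value)         # my_property 3 42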
    def __init__(self, f, endianness, scaler_type):
        """
        Read the metadata for a DAQmx raw segment. This is the raw
        DAQmx-specific portion of the raw data index.
        """
        self.scaler_type = scaler_type
        self.data_type = types.tds_data_types[0xFFFFFFFF]
        self.dimension = types.Uint32.read(f, endianness)
        # In TDMS format version 2.0, 1 is the only valid value for dimension
        if self.dimension != 1:
            raise ValueError("Data dimension is not 1")
        self.chunk_size = types.Uint64.read(f, endianness)

        # size of vector of format changing scalers
        scaler_vector_length = types.Uint32.read(f, endianness)
        self.scalers = [
            DaqMxScaler(f, endianness, scaler_type)
            for _ in range(scaler_vector_length)]

        # Read raw data widths.
        # This is an array of widths in bytes, which should be the same
        # for all channels that have DAQmx data in a segment.
        # There is one element per acquisition card, as data is interleaved
        # separately for each card.
        raw_data_widths_length = types.Uint32.read(f, endianness)
        self.raw_data_widths = np.zeros(raw_data_widths_length, dtype=np.int32)
        for width_idx in range(raw_data_widths_length):
            self.raw_data_widths[width_idx] = types.Uint32.read(f, endianness)
        properties_list = ", ".join(properties)
        return "%s(%s)" % (self.__class__.__name__, properties_list)

def _get_attr_repr(obj, attr_name):
    val = getattr(obj, attr_name)
    if isinstance(val, type):
        return val.__name__
    return repr(val)
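A small usage example of _get_attr_repr; the _Example class is made up. Attributes holding a type object (such as data_type, which is set to a class like types.Uint16) are shown by class name, and everything else falls back to repr().

class _Example:
    data_type = int    # a type object, so only its name is shown
    dimension = 1      # any other value uses repr()

print(_get_attr_repr(_Example(), "data_type"))   # prints: int
print(_get_attr_repr(_Example(), "dimension"))   # prints: 1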
# Type codes for DAQmx scalers don't match the normal TDMS type codes:
DAQMX_TYPES = {
    0: types.Uint8,
    1: types.Int8,
    2: types.Uint16,
    3: types.Int16,
    4: types.Uint32,
    5: types.Int32,
}
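A hedged example of resolving a DAQmx scaler's raw data type from the table above; the type code 2 is arbitrary, and the size attribute follows the StructType classes shown earlier.

scaler_type_code = 2                       # hypothetical code read from a scaler
raw_type = DAQMX_TYPES[scaler_type_code]   # -> types.Uint16
width_in_bytes = raw_type.size             # 2, per its StructType definition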