# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
""" Responsible for storing data read from TDMS files
"""
import tempfile
import numpy as np
from nptdms import types
from nptdms.timestamp import TimestampArray
from nptdms.log import log_manager
log = log_manager.get_logger(__name__)
def get_data_receiver(obj, num_values, raw_timestamps, memmap_dir=None):
    """ Return a new channel data receiver to use for the given TDMS object

    :param obj: TDMS channel object to receive data for
    :param num_values: Number of values to be stored
    :param raw_timestamps: Whether to store timestamp data as raw TDMS
        timestamps or a numpy datetime64 array
    :param memmap_dir: Optional directory to store memory map files,
        or None to not use memory map files
    """
    # A channel with no data type has no data to receive
    if obj.data_type is None:
        return None
    # DAQmx raw data is laid out differently from plain channel data,
    # so it gets a dedicated receiver type
    if obj.data_type == types.DaqMxRawData:
        return DaqmxDataReceiver(obj, num_values, memmap_dir)
    # NOTE(review): raw_timestamps is unused in the visible portion of this
    # function; presumably later branches (not shown here) select between a
    # raw-timestamp receiver and a datetime64-based receiver — confirm
    # against the full source.
import numpy as np
import numpy.polynomial.polynomial as poly
import re
from nptdms.log import log_manager
log = log_manager.get_logger(__name__)
# Sentinel input-source value; the name suggests it marks a scaling that is
# applied directly to the raw channel data rather than to the output of
# another scaling stage — confirm against the scaling lookup code.
RAW_DATA_INPUT_SOURCE = 0xFFFFFFFF
# NOTE(review): numeric codes presumably correspond to LabVIEW/NI excitation
# type property values — confirm against NI documentation.
VOLTAGE_EXCITATION = 10322
CURRENT_EXCITATION = 10134
class LinearScaling(object):
""" Linear scaling with slope and intercept
"""
def __init__(self, intercept, slope, input_source):
self.intercept = intercept
self.slope = slope
self.input_source = input_source
@staticmethod
def from_properties(properties, scale_index):
from collections import defaultdict
import numpy as np
from nptdms import types
from nptdms.base_segment import (
BaseSegment, BaseSegmentObject, RawDataChunk, read_interleaved_segment_bytes)
from nptdms.log import log_manager
log = log_manager.get_logger(__name__)
# Raw data index header values identifying the kind of DAQmx scaler used in
# a segment. NOTE(review): meanings inferred from names — confirm against
# the DAQmx segment metadata parsing code.
FORMAT_CHANGING_SCALER = 0x00001269
DIGITAL_LINE_SCALER = 0x0000126A
class DaqmxSegment(BaseSegment):
""" A TDMS segment with DAQmx data
"""
def _new_segment_object(self, object_path):
return DaqmxSegmentObject(object_path, self.endianness)
def _get_chunk_size(self):
# For DAQmxRawData, each channel in a segment has the same number
# of values and contains the same raw data widths, so use
import os
import numpy as np
from nptdms import types
from nptdms.base_segment import (
BaseSegment,
BaseSegmentObject,
RawChannelDataChunk,
RawDataChunk,
read_interleaved_segment_bytes,
fromfile)
from nptdms.log import log_manager
log = log_manager.get_logger(__name__)
class InterleavedDataSegment(BaseSegment):
    """ A TDMS segment with interleaved data
    """

    # No instance attributes beyond those declared on BaseSegment
    __slots__ = []
def _new_segment_object(self, object_path):
return TdmsSegmentObject(object_path, self.endianness)
def _read_data_chunk(self, file, data_objects, chunk_index):
# If all data types are sized and all the lengths are
# the same, then we can read all data at once with numpy,
# which is much faster
all_sized = all(
from copy import copy
from io import UnsupportedOperation
import os
import struct
import numpy as np
from nptdms import types
from nptdms.common import toc_properties
from nptdms.log import log_manager
log = log_manager.get_logger(__name__)
# Bind struct.unpack to a module-level name so hot parsing paths get a
# faster lookup than the attribute access
_struct_unpack = struct.unpack
# Per the constant names: raw data index value meaning an object has no
# data in this segment
RAW_DATA_INDEX_NO_DATA = 0xFFFFFFFF
# ...and value meaning the object reuses the previous segment's raw data
# index information
RAW_DATA_INDEX_MATCHES_PREVIOUS = 0x00000000
class BaseSegment(object):
    """ Abstract base class for TDMS segments
    """

    # __slots__ avoids a per-instance __dict__; a large TDMS file can produce
    # many segment instances, so this keeps memory use down
    __slots__ = [
        'position', 'num_chunks', 'ordered_objects', 'toc_mask',
        'next_segment_offset', 'next_segment_pos',
        'raw_data_offset', 'data_position', 'final_chunk_proportion',
        'endianness', 'object_properties']
# (stray docstring delimiter removed: fragment boundary in extracted source)
import logging
import os
import numpy as np
from nptdms import types
from nptdms.common import ObjectPath, toc_properties
from nptdms.utils import Timer, OrderedDict
from nptdms.base_segment import RawChannelDataChunk
from nptdms.tdms_segment import ContiguousDataSegment, InterleavedDataSegment
from nptdms.daqmx import DaqmxSegment
from nptdms.log import log_manager
log = log_manager.get_logger(__name__)
class TdmsReader(object):
""" Reads metadata and data from a TDMS file.
:ivar object_metadata: Dictionary of object path to ObjectMetadata
"""
    def __init__(self, tdms_file):
        """ Initialise a new TdmsReader

        :param tdms_file: Either the path to the tdms file to read
            as a string or pathlib.Path, or an already opened file.
        """
        # List of file segments; populated lazily when metadata is read
        self._segments = None
        # Maps object path -> most recently seen segment object, presumably so
        # later segments can reuse a previous segment's raw data index (see
        # RAW_DATA_INDEX_MATCHES_PREVIOUS) — confirm against full source
        self._prev_segment_objects = {}
def main():
    """ Command line entry point: parse arguments, optionally enable debug
    logging, then list the contents of the given TDMS file.
    """
    arg_parser = ArgumentParser(
        description="List the contents of a LabView TDMS file.")
    # Boolean flags, declared as (option strings, help text) pairs
    for option_names, help_text in (
            (('-p', '--properties'), "Include channel properties."),
            (('-d', '--debug'), "Print debugging information to stderr.")):
        arg_parser.add_argument(
            *option_names, action="store_true", help=help_text)
    arg_parser.add_argument(
        'tdms_file',
        help="TDMS file to read.")
    parsed_args = arg_parser.parse_args()
    # Turn on verbose logging before any file reading happens
    if parsed_args.debug:
        log_manager.set_level(logging.DEBUG)
    tdmsinfo(parsed_args.tdms_file, parsed_args.properties)
from collections import defaultdict
import warnings
import numpy as np
from nptdms import scaling, types
from nptdms.utils import Timer, OrderedDict, cached_property
from nptdms.log import log_manager
from nptdms.common import ObjectPath
from nptdms.reader import TdmsReader
from nptdms.channel_data import get_data_receiver
from nptdms.export import hdf_export, pandas_export
from nptdms.base_segment import RawChannelDataChunk
from nptdms.timestamp import TdmsTimestamp, TimestampArray
log = log_manager.get_logger(__name__)
# Have to get a reference to the builtin property decorator
# so we can use it in TdmsObject, which has a property method.
_property_builtin = property
class TdmsFile(object):
""" Reads and stores data from a TDMS file.
There are two main ways to create a new TdmsFile object.
TdmsFile.read will read all data into memory::
tdms_file = TdmsFile.read(tdms_file_path)
or you can use TdmsFile.open to read file metadata but not immediately read all data,