How to use the tables.File function in tables

To help you get started, we’ve selected a few examples of the tables.File function, based on popular ways it is used in public projects.


github ver228 / tierpsy-tracker / work_in_progress / ContrastMaps_analysis / fixHTorientation_linearmap_part2.py
import os
import time

import numpy as np
import pandas as pd
import tables

import matplotlib.pylab as plt
from sklearn.cluster import KMeans
from scipy.spatial.distance import cdist

segworm_file = '/Users/ajaver/Desktop/Gecko_compressed/20150511/Trajectories/Capture_Ch1_11052015_195105_segworm.hdf5';
contrastmap_file = '/Users/ajaver/Desktop/Gecko_compressed/20150511/Trajectories/Capture_Ch1_11052015_195105_lmap.hdf5';
 
cmap_fid = pd.HDFStore(contrastmap_file, 'r');
block_index = cmap_fid['/block_index'];
cmap_fid.close()

contrastmap_fid = tables.File(contrastmap_file, 'r');
segworm_fid = tables.File(segworm_file, 'r');

lmaps_data = contrastmap_fid.get_node('/block_lmap')
#%%
tic_ini = time.time()
worm_ids = [8]#block_index['worm_index_joined'].unique();
for ii_worm, worm_id in enumerate(worm_ids):
    tic = time.time()
    worm_block = block_index[block_index['worm_index_joined']==worm_id]
    assert np.all(np.diff(worm_block['lmap_id']) == 1)
    block_range = (worm_block['lmap_id'].iloc[0], worm_block['lmap_id'].iloc[-1])
    block_maps = lmaps_data[block_range[0]:block_range[1]+1,:,:]
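
The excerpt above opens both HDF5 files read-only with tables.File and pulls an array node out with get_node. A minimal sketch of that read pattern, using a hypothetical file name and node path:

import tables

# Open read-only, fetch a node by its path, and slice it like a NumPy array.
# The file name and node path are placeholders, not taken from the project above.
with tables.File('example_lmap.hdf5', 'r') as fid:
    lmaps_data = fid.get_node('/block_lmap')    # CArray/EArray node
    first_blocks = lmaps_data[0:10, :, :]       # only the requested slice is read from disk
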
github ver228 / tierpsy-tracker / work_in_progress / ContrastMaps_analysis / fixHTorientation.py
ind_list_ini.append((key1,key2, ind))
        
        for ind in all_ind_last[key1][key2]:
            ind_list_last.append((key1,key2, ind))

#this sorting should speed up reading operations for h5py 
ind_list_ini = sorted(ind_list_ini, key = lambda indexes : indexes[0]);
ind_list_last = sorted(ind_list_last, key = lambda indexes : indexes[0]);
#%%
#zip returns an iterator in Python 3, so materialize it before indexing
ind_list = list(zip(*ind_list_ini))[-1] + list(zip(*ind_list_last))[-1]
ind_list, cmap_ind = np.unique(ind_list, return_inverse=True)

n_ini = len(ind_list_ini)  #number of initial-frame entries, taken before the list is reassigned
ind_list_ini = list(zip(*ind_list_ini)) + [tuple(cmap_ind[:n_ini])]
ind_list_last = list(zip(*ind_list_last)) + [tuple(cmap_ind[n_ini:])]
##
contrastmap_fid = tables.File(contrastmap_file, 'w');

ind_block_table = contrastmap_fid.create_table('/', 'block_index', {
    'worm_index_joined':    tables.Int32Col(pos=0),
    'block_id'         : tables.Int32Col(pos=1),
    'plate_worms_id'   : tables.Int32Col(pos=2),
    'segworm_id'         : tables.Int32Col(pos=3)
    }, filters = tables.Filters(complevel=5, complib='zlib', shuffle=True))

ind_ini_table = contrastmap_fid.create_table('/', 'block_initial', {
    'worm_index_joined':    tables.Int32Col(pos=0),
    'block_id'         : tables.Int32Col(pos=1),
    'plate_worms_id'   : tables.Int32Col(pos=2),
    'cmap_id'         : tables.Int32Col(pos=3)
    }, filters = tables.Filters(complevel=5, complib='zlib', shuffle=True))
ind_last_table = contrastmap_fid.create_table('/', 'block_last', {
    'worm_index_joined':    tables.Int32Col(pos=0),
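
The excerpt is cut off, but the pattern is clear: open the output file in write mode with tables.File, then build tables from a column-description dictionary plus zlib Filters. A self-contained sketch of that pattern (the file name and example rows are hypothetical):

import tables

# Create a compressed table from a description dict and append a few rows.
with tables.File('example_blocks.hdf5', 'w') as fid:
    filters = tables.Filters(complevel=5, complib='zlib', shuffle=True)
    block_table = fid.create_table('/', 'block_index', {
        'worm_index_joined': tables.Int32Col(pos=0),
        'block_id':          tables.Int32Col(pos=1),
        'plate_worms_id':    tables.Int32Col(pos=2),
        'segworm_id':        tables.Int32Col(pos=3),
    }, filters=filters)
    block_table.append([(1, 0, 10, 100), (1, 1, 11, 101)])  # rows given in column order
    block_table.flush()
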
github ver228 / tierpsy-tracker / work_in_progress / ContrastMaps_analysis / fixHTorientation_tables_all_part2.py
#%%
#contrastmap_file = '/Users/ajaver/Desktop/Gecko_compressed/20150323/Trajectories/CaptureTest_90pc_Ch1_02022015_141431_cmap-short.hdf5';
contrastmap_file = '/Users/ajaver/Desktop/Gecko_compressed/20150323/Trajectories/CaptureTest_90pc_Ch1_02022015_141431_cmap.hdf5';
segworm_file = '/Users/ajaver/Desktop/Gecko_compressed/20150323/Trajectories/CaptureTest_90pc_Ch1_02022015_141431_segworm.hdf5';

#contrastmap_fid = h5py.File(contrastmap_file, 'r');

segworm_file_fix = segworm_file[:-5] + '_fix2' + segworm_file[-5:];
os.system('cp "%s" "%s"' % (segworm_file, segworm_file_fix))

cmap_fid = pd.HDFStore(contrastmap_file, 'r');
block_index = cmap_fid['/block_index'];
cmap_fid.close()

contrastmap_fid = tables.File(contrastmap_file, 'r');
segworm_fid = h5py.File(segworm_file_fix, 'r+');


#%%
worm_id= 3;

worm_block = block_index[block_index['worm_index_joined']==worm_id]

block_count = worm_block['block_id'].value_counts();
block_order = block_count.index;

skeleton_std = np.zeros((block_order.max(), segworm_fid['/segworm_results/skeleton'].shape[2]-1))

for ii, bb in enumerate(block_order):
    good = worm_block['block_id'] == bb;
    segworm_id = worm_block.loc[good, 'segworm_id'].values
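
Note how this excerpt reads the same data through three APIs: pandas.HDFStore for the block index, tables.File for the contrast maps, and h5py for in-place edits of the copied segworm file. With PyTables alone, the read side looks roughly like the sketch below (the file name is hypothetical; the node path follows the snippet):

import tables

# Inspect an array node before reading it; shape and dtype are metadata and cost no I/O.
with tables.File('example_segworm.hdf5', 'r') as fid:
    skel = fid.get_node('/segworm_results/skeleton')
    print(skel.shape, skel.dtype)
    first_frame = skel[0]    # data is read lazily, one slice at a time
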
github ver228 / tierpsy-tracker / _old / GUI_QT5 / _old / MWTrackerViewer_old / MWTrackerViewer_GUI_old.py
def saveData(self):

        # pytables saving format is more convenient than pandas

        # convert data into a rec array to save into pytables
        trajectories_recarray = self.trajectories_data.to_records(index=False)

        with tables.File(self.skel_file, "r+") as ske_file_id:
            # pytables filters.
            table_filters = tables.Filters(
                complevel=5, complib='zlib', shuffle=True, fletcher32=True)

            newT = ske_file_id.create_table(
                '/',
                'trajectories_data_d',
                obj=trajectories_recarray,
                filters=table_filters)

            ske_file_id.remove_node('/', 'trajectories_data')
            newT.rename('trajectories_data')
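
This method shows a common PyTables replace-a-table idiom: write the new table under a temporary name, remove the old node, then rename the new one into place. A stripped-down sketch of the same idiom on a hypothetical file and record array:

import numpy as np
import tables

# Replace '/trajectories_data' with freshly computed records (placeholder data and file).
records = np.zeros(5, dtype=[('worm_index', np.int32), ('coord_x', np.float32)])
table_filters = tables.Filters(complevel=5, complib='zlib', shuffle=True, fletcher32=True)

with tables.File('example_skeletons.hdf5', 'a') as ske_file_id:
    new_table = ske_file_id.create_table('/', 'trajectories_data_d',
                                         obj=records, filters=table_filters)
    if '/trajectories_data' in ske_file_id:   # remove the old node if it exists
        ske_file_id.remove_node('/', 'trajectories_data')
    new_table.rename('trajectories_data')
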
github ver228 / tierpsy-tracker / tierpsy / analysis / feat_init / smooth_skeletons_table.py
# display progress
        dd = " Smoothing skeletons. Worm %i of %i done." % (n, tot_worms)
        print_flush(
            base_name +
            dd +
            ' Total time:' +
            progress_timer.get_time_str())
    
    
    _display_progress(0)
    #%%
    
    
    #initialize arrays
    food_cnt = read_food_contour(skeletons_file)
    with tables.File(skeletons_file, 'r') as fid:
        n_segments = fid.get_node('/skeleton').shape[1]
    
    with tables.File(features_file, 'w') as fid_features:
        if food_cnt is not None:
            fid_features.create_array('/', 'food_cnt_coord', obj=food_cnt.astype(np.float32))
        
        worm_coords_array = {}
        w_node = fid_features.create_group('/', 'coordinates')
        for array_name in ['skeletons', 'dorsal_contours', 'ventral_contours', 'widths']:
            if array_name != 'widths':  
                a_shape = (0, n_segments, 2)
            else:
                a_shape = (0, n_segments)
            
            worm_coords_array[array_name] = fid_features.create_earray(
                w_node,
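
The create_earray call is truncated, but the surrounding lines show the intent: create a '/coordinates' group and a set of extendable arrays whose first dimension starts at zero and grows as worms are appended. A compact sketch, assuming a hypothetical file name and 49 skeleton segments:

import numpy as np
import tables

# Extendable array: first dimension is 0 and grows with append() (placeholder names).
n_segments = 49
with tables.File('example_features.hdf5', 'w') as fid_features:
    w_node = fid_features.create_group('/', 'coordinates')
    skeletons = fid_features.create_earray(
        w_node, 'skeletons',
        atom=tables.Float32Atom(),
        shape=(0, n_segments, 2),
        filters=tables.Filters(complevel=5, complib='zlib', shuffle=True))
    skeletons.append(np.zeros((10, n_segments, 2), dtype=np.float32))  # adds 10 rows
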
github ver228 / tierpsy-tracker / MWTracker / trackWorms / getWormTrajectories.py
'''
    #read images from 'masked_image_file', and save the linked trajectories and their features into 'trajectories_file'
    #use the first 'total_frames' number of frames, if it is equal -1, use all the frames in 'masked_image_file'
    min_area -- min area of the segmented worm
    min_length -- min size of the bounding box in the ROI of the compressed image
    max_allowed_dist -- maximum allowed distance between two consecutive trajectories
    area_ratio_lim -- allowed range between the area ratio of consecutive frames 
    is_single_worm -- filter

    ''' 

    #check that the mask file is correct
    if not os.path.exists(masked_image_file):
        raise Exception('HDF5 Masked Image file does not exist.')
    
    with tables.File(masked_image_file, 'r') as mask_fid:
        mask_dataset = mask_fid.get_node("/mask")
        if not mask_dataset._v_attrs['has_finished'] >= 1:
            raise Exception('HDF5 Masked Image was not finished correctly.')
        if mask_dataset.shape[0] == 0:
            raise Exception('Empty set in masked image file. Nothing to do here.')
            
    #initialize variables
    base_name = masked_image_file.rpartition('.')[0].rpartition(os.sep)[-1]
    progress_str = '####'

    
    #read timestamps from the masked_image_file 
    timestamp, timestamp_time = getTimestamp(masked_image_file)

    with tables.File(masked_image_file, 'r') as mask_fid, \
    tables.open_file(trajectories_file, mode = 'w') as feature_fid:
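
Two usage details are worth noting here: node attributes are reached through _v_attrs, and the final with statement opens the mask file read-only and the trajectories file for writing in a single context, so both handles are closed even if an error occurs. A minimal sketch of the attribute check (the file name is hypothetical; '/mask' and 'has_finished' follow the snippet):

import tables

# Read a user attribute stored on a node before deciding whether to process the file.
with tables.File('example_masked.hdf5', 'r') as mask_fid:
    mask_dataset = mask_fid.get_node('/mask')
    if 'has_finished' in mask_dataset._v_attrs:
        print('has_finished =', mask_dataset._v_attrs['has_finished'])
    print('frames stored:', mask_dataset.shape[0])
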
github PyTables / PyTables / tables / nodes / filenode.py
where, name
      Location of the filenode where the data shall be read from.  If
      no node *name* can be found at *where*, the first node at
      *where* whose *_filename* attribute matches *name* will be read.

    overwrite
      Whether or not a possibly existing file of the specified
      *filename* shall be overwritten.

    create_target
      Whether or not the folder hierarchy needed to accommodate the
      given target ``filename`` will be created.

    """
    new_h5file = not isinstance(h5file, tables.file.File)
    f = tables.File(h5file, "r") if new_h5file else h5file
    try:
        fnode = open_node(f.get_node(where=where, name=name))
    except tables.NoSuchNodeError:
        fnode = None
        for n in f.walk_nodes(where=where, classname="EArray"):
            if n.attrs._filename == name:
                fnode = open_node(n)
                break
        if fnode is None:
            f.close()
            raise tables.NoSuchNodeError("A filenode '%s' cannot be found at "
                                         "'%s'" % (name, where))

    # guess output filename if necessary
    if os.path.isdir(filename) or filename.endswith(os.path.sep):
        try:
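
The helper accepts either an already open tables.File or a path: if it is given a path it opens the file itself and remembers that it now owns the handle, and the walk_nodes fallback then searches for an EArray whose _filename attribute matches. The ownership half of that pattern, reduced to a sketch (the path and function name are hypothetical):

import tables

def as_open_file(h5file, mode='r'):
    """Accept an open tables.File or a path; return (file, we_opened)."""
    if isinstance(h5file, tables.file.File):
        return h5file, False                   # caller keeps ownership
    return tables.File(h5file, mode), True     # we opened it, so we must close it

f, we_opened = as_open_file('example.h5')      # placeholder path
try:
    print(f.root)                              # root group of the file
finally:
    if we_opened:
        f.close()
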
github pierregm / scikits.timeseries-sandbox / scikits / timeseries / lib / tstables.py
where : Group
        Location of the table.
    name : string
        Name of the table.
    maskedarray : MaskedArray
        Masked array to store
    title : {'', string}, optional
        Title of the table
    """
    parentNode = self._getOrCreatePath(where, createparents)
    _checkfilters(filters)
    return MaskedTable(parentNode, name, maskedarray,
                       title=title, filters=filters, 
                       expectedrows=expectedrows,
                       chunkshape=chunkshape, byteorder=byteorder)
File.createMaskedTable = createMaskedTable



def createTimeSeriesTable(self, where, name, series, title="",
                          filters=None, expectedrows=10000,
                          chunkshape=None, byteorder=None,
                          createparents=False):
    """
    Creates a :class:`TimeSeriesTable` from
    a :class:`~scikits.timeseries.TimeSeries` object.

    Parameters
    ----------
    where : Group
        Location of the table.
    name : string
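
This module extends tables.File itself: after defining createMaskedTable it assigns the function onto the class (File.createMaskedTable = createMaskedTable), so every open file gains the extra method. The same monkey-patching pattern with a trivial, hypothetical helper:

import tables

def create_int_table(self, where, name, values, title=''):
    """Hypothetical helper: create a one-column Int32 table and fill it from an iterable."""
    table = self.create_table(where, name, {'value': tables.Int32Col()}, title=title)
    table.append([(int(v),) for v in values])
    table.flush()
    return table

tables.File.create_int_table = create_int_table   # attach the helper to the class

with tables.File('example_patched.h5', 'w') as fid:
    fid.create_int_table('/', 'numbers', [1, 2, 3])
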
github acoular / acoular / acoular / spectra.py
desc="basename for cache file")

    #: The cross spectral matrix, 
    #: (number of frequencies, numchannels, numchannels) array of complex
    #: readonly.
    csm = Property( 
        desc="cross spectral matrix")

    # internal identifier
    digest = Property( 
        depends_on = ['time_data.digest', 'calib.digest', 'block_size', 
            'window', 'overlap'], 
        )

    # hdf5 cache file
    h5f = Instance(tables.File, transient = True )
    
    traits_view = View(
        ['time_data@{}', 
         'calib@{}', 
            ['block_size', 
                'window', 
                'overlap', 
                    ['ind_low{Low Index}', 
                    'ind_high{High Index}', 
                    '-[Frequency range indices]'], 
                    ['num_blocks~{Number of blocks}', 
                    'freq_range~{Frequency range}', 
                    '-'], 
                '[FFT-parameters]'
            ], 
        ],
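
Here the open tables.File is held as a long-lived Traits attribute (h5f) backing an HDF5 cache, rather than being opened and closed around every read. A plain-Python sketch of that keep-a-handle-open pattern, with a hypothetical class and cache file name:

import tables

class CachedResult:
    """Hypothetical cache that keeps its HDF5 handle open for repeated access."""
    def __init__(self, cache_path):
        self.h5f = tables.File(cache_path, 'a')   # created if missing, kept open

    def close(self):
        if self.h5f.isopen:
            self.h5f.close()

cache = CachedResult('example_cache.h5')
# ... read and write nodes through cache.h5f here ...
cache.close()
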
github ver228 / tierpsy-tracker / work_in_progress / Intensity_analysis / getIntensityMaps_ver2.py
#trajectories_data = trajectories_data[trajectories_data['frame_number']>1750]
        #trajectories_data = trajectories_data[trajectories_data['frame_number']<1920]
        #trajectories_data = trajectories_data[trajectories_data['worm_index_joined'] == 16]
       
       
    trajectories_data['int_map_id'] = np.arange(len(trajectories_data))
    
    tot_rows = len(trajectories_data)
        
    #let's save this data into the intensities file
    with tables.File(intensities_file, 'w') as fid:
        fid.create_table('/', 'trajectories_data', \
            obj = trajectories_data.to_records(index=False), filters=table_filters)

    with tables.File(masked_image_file, 'r')  as mask_fid, \
         tables.File(skeletons_file, 'r') as ske_file_id, \
         tables.File(intensities_file, "r+") as int_file_id:
        
        #pointer to the compressed videos
        mask_dataset = mask_fid.get_node("/mask")
        
        #pointer to skeletons
        skel_tab = ske_file_id.get_node('/skeleton')
        skel_width_tab = ske_file_id.get_node('/width_midbody')
        

        
        #create array to save the intensities
        filters = tables.Filters(complevel=5, complib='zlib', shuffle=True)
        
        worm_int_avg_tab = int_file_id.create_carray("/", "straighten_worm_intensity_median", \
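
The final create_carray call is cut off by the excerpt. For reference, a complete call usually supplies an atom, a fixed shape and the filters, roughly as in the sketch below (the file name, shape and dtype are assumptions, not values from the project):

import tables

# Fixed-size compressed array; every dimension must be known up front.
filters = tables.Filters(complevel=5, complib='zlib', shuffle=True)
with tables.File('example_intensities.hdf5', 'w') as int_file_id:
    worm_int_avg_tab = int_file_id.create_carray(
        '/', 'straighten_worm_intensity_median',
        atom=tables.Float32Atom(),
        shape=(1000, 131),        # (tot_rows, map width), placeholder values
        filters=filters)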