How to use the dill.load function in dill

To help you get started, we’ve selected a few dill.load examples based on popular ways the function is used in public projects.
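Before looking at the project snippets, here is a minimal, self-contained sketch of the basic dump/load round trip. The file name square.dill is just an illustration; the lambda shows why dill is used instead of the standard pickle module, which cannot serialize lambdas.

import dill

# dill extends pickle, so it can serialize objects the standard library cannot,
# such as lambdas and locally defined functions.
square = lambda x: x * x

with open('square.dill', 'wb') as f:
    dill.dump(square, f)

with open('square.dill', 'rb') as f:
    restored = dill.load(f)

print(restored(4))  # prints 16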


github trolldbois / python-haystack / haystack / reverse / pattern.py (View on Github)
def _loadAddressCache(self):
        # DO NOT SORT LIST. These are sequences, not sets.
        myname = self.cacheFilenamePrefix + '.pinned.vaddr'
        if os.access(myname, os.F_OK):
            addressCache = dill.load(file(myname, 'r'))
            log.debug(
                "%d Signature addresses loaded from cache." %
                (len(addressCache)))
            self.addressCache.update(addressCache)
        else:  # get at least 10 values
            for i in xrange(0, len(self), len(self) / 10):
                self.getAddressForPreviousPointer(i)
            self._saveAddressCache()
        return
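The snippet above is Python 2 code (file(), xrange). A rough Python 3 sketch of the same load-from-cache-or-rebuild idea, where build_addresses and the cache path are hypothetical stand-ins:

import os
import dill

CACHE_PATH = 'addresses.pinned.vaddr'  # hypothetical cache file name

def load_address_cache(build_addresses):
    """Return cached addresses if the cache file exists, otherwise build and save them."""
    if os.access(CACHE_PATH, os.F_OK):
        with open(CACHE_PATH, 'rb') as f:  # dill files should be opened in binary mode
            return dill.load(f)
    addresses = build_addresses()          # compute the values on a cache miss
    with open(CACHE_PATH, 'wb') as f:
        dill.dump(addresses, f)
    return addresses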
github ergonomica / ergonomica / ergonomica / lib / lib / ergo_implication.py (View on Github)
def implication(events, functions):

    try:
        # load implications store
        implications = dill.load(open(os.path.expanduser('~/.ergo/.events')))
    except IOError:
        # initialize the implications as a blank dictionary.
        # if the file doesn't already exist, it will be dumped
        # with the new data created.
        implications = {}

    if not isinstance(events, list):
        events = [events]

    if not isinstance(functions, list):
        functions = [functions]

    for event in events:
        implications[event] = implications.get(event, []) + functions
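The same load-or-initialize pattern, sketched with a context manager and an explicit binary mode (the original opens the file in text mode, which generally fails for pickled data on Python 3):

import os
import dill

STORE_PATH = os.path.expanduser('~/.ergo/.events')  # path taken from the snippet above

def load_implications():
    """Load the persisted implications dict, or start with an empty one."""
    try:
        with open(STORE_PATH, 'rb') as f:  # binary mode: the file holds a dill stream
            return dill.load(f)
    except (IOError, OSError):
        return {}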
github kekmodel / gym-tictactoe-zero / evaluator.py (View on Github)
def _load_tree(self, path):
        with open(path, 'rb') as f:
            tree_memory = pickle.load(f)
            return tree_memory
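dill can read standard pickle streams as well (its unpickler builds on the pickle module), so the same context-manager pattern works unchanged with dill.load; a minimal sketch:

import dill

def load_tree(path):
    # dill.load accepts the same binary file handle that pickle.load does
    with open(path, 'rb') as f:
        return dill.load(f)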
github radiodee1 / awesome-chatbot / torch_t2t / torch_t2t_transformer_train.py (View on Github)
def prepare_dataloaders(opt, device):
    batch_size = opt.batch_size
    data = pickle.load(open(opt.data_pkl, 'rb'))

    if opt.vocab_file and os.path.isfile(opt.vocab_file):
        data_vocab = pickle.load(open(opt.vocab_file, 'rb'))

        data['vocab'] = data_vocab['vocab']
        data['settings'].max_len = len(data_vocab['vocab']['src'].vocab.stoi)

        print(len(data['vocab']['txt'].vocab), len(data_vocab['vocab']['txt'].vocab), 'length of vocab.')

    fields = {'src': data['vocab']['txt'], 'trg': data['vocab']['txt']}


    if opt.embs_share_weight:
        assert data['vocab']['src'].vocab.stoi == data['vocab']['trg'].vocab.stoi, \
            'To sharing word embedding the src/trg word2idx table shall be the same.'
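A hedged sketch of that loading flow, reduced to the two load calls: read the preprocessed-data dict, then optionally override its vocabulary from a second file (the function name and dict structure are illustrative, not the project's API):

import os
import dill

def load_training_data(data_path, vocab_path=None):
    """Load a preprocessed-data dict; optionally replace its vocab from a separate file."""
    with open(data_path, 'rb') as f:
        data = dill.load(f)
    if vocab_path and os.path.isfile(vocab_path):
        with open(vocab_path, 'rb') as f:
            data['vocab'] = dill.load(f)['vocab']
    return data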
github zbeaver4 / python-webpage-monitor-slackbot / plugins / monitorbot / monitorbot.py (View on Github)
def undillify(url, str_version = False):
    '''Reads back in a serialized object matching the filename of the given url'''
    
    fn = os.path.join('webpage_cache', strip_url(url) + '.dill')
    string_version = dill.load(open(fn, 'rb'))
    
    if str_version:
        return string_version
    else:
        return BeautifulSoup(string_version)
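A sketch of the same read-back-from-a-per-key cache idea, with the URL-stripping helper left out (the directory name comes from the snippet; the function name is hypothetical):

import os
import dill

CACHE_DIR = 'webpage_cache'

def load_cached_page(key):
    """Read back the serialized page string stored under <key>.dill."""
    path = os.path.join(CACHE_DIR, key + '.dill')
    with open(path, 'rb') as f:
        return dill.load(f)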
github srvk / eesen / tf / ctc-train / RNN / RNN_clip_align.py (View on Github)
Ah = "relu",               # hidden unit activation (e.g. relu, tanh, lstm)
                 Ay = "linear",             # output unit activation (e.g. linear, sigmoid, softmax)
                 predictPer = "frame",      # frame or sequence
                 loss = None,               # loss function (e.g. mse, ce, ce_group, hinge, squared_hinge)
                 L1reg = 0.0,               # L1 regularization
                 L2reg = 0.0,               # L2 regularization
                 momentum = 0.0,            # SGD momentum
                 seed = 15213,              # random seed for initializing the weights
                 frontEnd = None,           # a lambda function for transforming the input
                 filename = None,           # initialize from file
                 initParams = None,         # initialize from given dict
                ):

        if filename is not None:            # load parameters from file
            with smart_open(filename, "rb") as f:
                initParams = dill.load(f)
        if initParams is not None:          # load parameters from given dict
            self.paramNames = []
            self.params = []
            for k, v in initParams.iteritems():
                if type(v) is numpy.ndarray:
                    self.addParam(k, v)
                else:
                    setattr(self, k, v)
                    self.paramNames.append(k)
            # F*ck, locals()[k] = v doesn't work; I have to do this statically
            Nlayers, Ndirs, Nx, Nh, Ny, Ah, Ay, predictPer, loss, L1reg, L2reg, momentum, frontEnd \
                = self.Nlayers, self.Ndirs, self.Nx, self.Nh, self.Ny, self.Ah, self.Ay, self.predictPer, self.loss, self.L1reg, self.L2reg, self.momentum, self.frontEnd
        else:                           # Initialize parameters randomly
            # Names of parameters to save to file
            self.paramNames = ["Nlayers", "Ndirs", "Nx", "Nh", "Ny", "Ah", "Ay", "predictPer", "loss", "L1reg", "L2reg", "momentum", "frontEnd"]
            for name in self.paramNames:
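A reduced, hedged sketch of the load-parameters branch of that constructor: restore a dict that was dumped with dill, then apply it to the instance (the class is hypothetical and the attribute handling simplified; .items() replaces the Python 2 .iteritems()):

import dill

class Model:
    def __init__(self, filename=None, init_params=None):
        if filename is not None:
            with open(filename, 'rb') as f:   # restore a previously dumped parameter dict
                init_params = dill.load(f)
        if init_params is not None:
            for name, value in init_params.items():
                setattr(self, name, value)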
github philipperemy / speaker-change-detection / audio / audio_reader.py (View on Github)
FILENAME: filename}
                except librosa.util.exceptions.ParameterError as e:
                    logger.error(e)
                    logger.error('[DUMP AUDIO ERROR SKIPPING FILENAME] {}'.format(filename))
            dill.dump(self.metadata, open(os.path.join(TMP_DIR, 'metadata.pkl'), 'wb'))

        logger.debug(
            'Using the generated files at {}. Using them to load the cache. Be sure to have enough memory.'.format(
                TMP_DIR))
        self.metadata = dill.load(open(os.path.join(TMP_DIR, 'metadata.pkl'), 'rb'))

        pickle_files = find_files(TMP_DIR, pattern='*.pkl')
        for pkl_file in tqdm(pickle_files, desc='reading cache'):
            if 'metadata' not in pkl_file:
                with open(pkl_file, 'rb') as f:
                    obj = dill.load(f)
                    self.cache[obj[FILENAME]] = obj
        logger.debug('Cache took {0:.2f} seconds to load. {1:} keys.'.format(time() - st, len(self.cache)))
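A self-contained sketch of the cache-loading loop above, using glob instead of the project's find_files helper (the 'filename' key is an assumption for this sketch):

import glob
import os
import dill

def load_cache(tmp_dir):
    """Load every dill-serialized cache entry in tmp_dir into a dict keyed by filename."""
    cache = {}
    for pkl_file in glob.glob(os.path.join(tmp_dir, '*.pkl')):
        if 'metadata' in os.path.basename(pkl_file):
            continue                          # the metadata file is handled separately
        with open(pkl_file, 'rb') as f:
            obj = dill.load(f)
        cache[obj['filename']] = obj          # key name is an assumption, see above
    return cache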
github lesgourg / class_public / DarkAgesModule / DarkAges / transfer.py (View on Github)
def transfer_load(infile):
	u"""Reloads an instance of the :class:`transfer `
	-class dumped with :meth:`transfer_dump `

	Parameters
	----------
	infile : :obj:`str`
		Filename (absolute or relative) under which the transfer instance is stored

	Returns
	-------
	:obj:`class`
		Restored instance of the :class:`transfer`-class
	"""

	loaded_transfer = dill.load(open(infile, 'rb'))
	if not isinstance(loaded_transfer, transfer):
		from .__init__ import DarkAgesError
		raise DarkAgesError('The file {0} does not provide a proper instance of the class "transfer"'.format(infile))
	else:
		return loaded_transfer
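The same load-and-validate pattern, generalized into a small helper (a sketch, not part of DarkAges):

import dill

def load_instance(path, expected_type):
    """Load an object with dill and check that it is an instance of expected_type."""
    with open(path, 'rb') as f:
        obj = dill.load(f)
    if not isinstance(obj, expected_type):
        raise TypeError('{0} does not contain an instance of {1}'.format(path, expected_type.__name__))
    return obj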
github Rapid-Design-of-Systems-Laboratory / beluga / examples / Mansell / HannibalPlot.py (View on Github)
import scipy.ndimage as ndimage
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cm

#Load image
img=Image.open('terrain_test2.jpg')
img=np.array(img)
img=np.asfarray(img)/255.0 #Scale to unity scale (and convert to float - important)
img=ndimage.gaussian_filter(img,sigma=5,order=0) #Smooth image
Xcoords=np.linspace(0,10,len(img[:,0])) #North Coordinates
Ycoords=Xcoords[0:len(img[0,:])] #East Coordinates
terr_spl=interpolate.RectBivariateSpline(Xcoords,Ycoords,img,kx=3,ky=3,s=10)

#Load data
f = open('data.dill','rb')
out=dill.load(f)
f.close()
xsol=out['solution'][-1][-1].y[0,:]
ysol=out['solution'][-1][-1].y[1,:]
lamXsol=out['solution'][-1][-1].y[2,:]
lamYsol=out['solution'][-1][-1].y[3,:]
tf=out['solution'][-1][-1].y[4,0]
tsol=out['solution'][-1][-1].x*tf

#Plot contour plot
Xcoords=np.linspace(0,10,len(img[:,0]))
Ycoords=Xcoords[0:len(img[0,:])]
Y,X = np.meshgrid(Ycoords, Xcoords)
terrData=terr_spl.ev(X,Y) #Create the elevation data based on the spline fit

plt.figure(1)
plt.axis([0,Ycoords[-1],0,Xcoords[-1]])
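The load step in that script, sketched with a context manager so the file handle is closed even if the plotting code later raises (the file name and dictionary key come from the snippet):

import dill

with open('data.dill', 'rb') as f:
    out = dill.load(f)

solution = out['solution'][-1][-1]  # last continuation step, last solution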
github kalekiu / easyesn / src / easyesn / easyesn / BaseESN.py (View on Github)
def load(path):
        f = open(path, "rb")
        result = pickle.load(f)
        f.close()
        return result
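A hedged sketch of the save/load pair such a static loader usually belongs to, written with dill and context managers (the class is hypothetical):

import dill

class BaseModel:
    def save(self, path):
        with open(path, 'wb') as f:
            dill.dump(self, f)

    @staticmethod
    def load(path):
        with open(path, 'rb') as f:
            return dill.load(f)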