How to use the klepto._archives.hdf_archive function in klepto

To help you get started, we’ve selected a few klepto examples that show popular ways it is used in public projects.
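Before the source excerpts below, here is a minimal sketch of typical use. It assumes klepto (and its h5py dependency) is installed; the file name results.hdf5 is illustrative, and the keyword defaults follow the hdf_archive docstring shown at the end of this page.

from klepto.archives import hdf_archive

# create a single-file hdf5 archive; cached=False writes straight to disk
arc = hdf_archive('results.hdf5', cached=False, serialized=True)

# the archive behaves like a dict backed by the hdf5 file
arc['x'] = [1, 2, 3]
print(arc['x'])      # -> [1, 2, 3]
print('x' in arc)    # -> True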


github uqfoundation / klepto / klepto / _archives.py
def _store(self, key, value, input=False):
    "store output (and possibly input) in a subdirectory"
    _key = TEMP+hash(random(), 'md5')
    # create an input file when key is not a suitable directory name
    if self._fname(key) != key: input=True #XXX: errors if protocol=0,1?
    # create a temporary directory, and dump the results
    try:
        _file = os.path.join(self._mkdir(_key), self._file)
        if input: _args = os.path.join(self._getdir(_key), self._args)
        adict = {'serialized':self.__state__['serialized'],
                 'protocol':self.__state__['protocol'],
                 'meta':self.__state__['meta']}
        #XXX: assumes one entry per file; ...could use name as key?
        memo = hdf_archive(_file, **adict)
        memo[None] = value
        if input:
            memo = hdf_archive(_args, **adict)
            memo[None] = key
    except (OSError,TypeError):
        # the bare string below is a no-op: the failure is silently ignored
        "failed to populate directory for '%s'" % str(key)
    # move the results to the proper place
    try: #XXX: possible permissions issues here
        self._rmdir(key) #XXX: 'key' must be a suitable dir name
        os.renames(self._getdir(_key), self._getdir(key))
   #except TypeError: #XXX: catch key that isn't converted to safe filename
   #    "error in populating directory for '%s'" % str(key)
    except OSError: #XXX: if rename fails, may need cleanup (_rmdir ?)
        # again a bare no-op string; the error is deliberately swallowed
        "error in populating directory for '%s'" % str(key)
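_store writes each value into its own single-entry hdf_archive file (keyed by None) inside a temporary directory, then os.renames that directory into its final place. Below is a hedged sketch of the same write pattern outside the class; TEMP, self._file, and the other internals are replaced by the hypothetical names tmp_entry and output.hdf5.

import os
from klepto.archives import hdf_archive

tmpdir = 'tmp_entry'               # hypothetical stand-in for the temp dir
os.makedirs(tmpdir, exist_ok=True)
_file = os.path.join(tmpdir, 'output.hdf5')

# one entry per file, stored under the key None, as _store does
memo = hdf_archive(_file, cached=False, serialized=True)
memo[None] = {'result': 42}

# _store then renames the temp directory into its proper place
os.renames(tmpdir, 'entry_dir')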
github uqfoundation / klepto / klepto / _archives.py
def copy(self, name=None): #XXX: always None? or allow other settings?
    "D.copy(name) -> a copy of D, with a new archive at the given name"
    filename = self.__state__['id']
    if name is None: name = filename
    else: shutil.copy2(filename, name) #XXX: overwrite?
    adict = hdf_archive(filename=name, **self.state)
   #adict.update(self.__asdict__())
    return adict
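Per the docstring, copy duplicates the backing file with shutil.copy2 and returns a new archive at the given name. A small hedged example; both file names are illustrative:

from klepto.archives import hdf_archive

d = hdf_archive('memo.hdf5', {'a': 1}, cached=False)
d2 = d.copy('backup.hdf5')   # copies memo.hdf5 to backup.hdf5
print(d2['a'])               # -> 1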
github uqfoundation / klepto / klepto / _archives.py
def _lookup(self, key, input=False):
    "get input or output from subdirectory name"
    _dir = self._getdir(key)
    _file = self._args if input else self._file
    _file = os.path.join(_dir, _file)
    try:
        adict = {'serialized':self.__state__['serialized'],
                 'protocol':self.__state__['protocol'],
                 'meta':self.__state__['meta'], 'cached':False}
        #XXX: assumes one entry per file; ...could use name as key?
        #XXX: alternately, could store {key:value} (i.e. use one file)?
        memo = tuple(hdf_archive(_file, **adict).__asdict__().values())[0]
       #memo = next(iter(hdf_archive(_file, **adict).values()))
    except: #XXX: should only catch the appropriate exceptions
        memo = None
        #FIXME: not sure if _lookup should delete a key in all cases
        #FIXME: (maybe only delete key when it's new, but fails)
        #self._rmdir(key) # don't leave a keyfile on disk
        raise KeyError(key)
       #raise OSError("error reading directory for '%s'" % key)
    return memo
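_lookup is the read-side counterpart: it opens the per-entry file with cached=False and extracts the lone stored value via __asdict__(). The same pattern in isolation, continuing the hypothetical file names from the _store sketch above:

from klepto.archives import hdf_archive

_file = 'entry_dir/output.hdf5'   # written by the sketch after _store
memo = hdf_archive(_file, cached=False, serialized=True)

# one entry per file: take the single value, whatever its key
value = tuple(memo.__asdict__().values())[0]
print(value)   # -> {'result': 42}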
github uqfoundation / klepto / klepto / archives.py
def __new__(hdf_archive, name=None, dict=None, cached=True, **kwds):
    """initialize a dictionary with a single hdf5 file archive backend

Inputs:
    name: name of the hdf file archive [default: memo.hdf5]
    dict: initial dictionary to seed the archive
    cached: if True, use an in-memory cache interface to the archive
    serialized: if True, pickle file contents; otherwise save python objects
    protocol: pickling protocol [default: None (use the default protocol)]
    meta: if True, store as file root metadata; otherwise store in datasets
    """
    if dict is None: dict = {}
    archive = _hdf_archive(name, **kwds)
    if cached: archive = cache(archive=archive)
    archive.update(dict)
    return archive
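With the default cached=True, hdf_archive returns an in-memory cache wrapped around the file archive, so updates live in memory until synced. The sketch below assumes klepto's usual cache interface (a dump method that pushes cached entries to the backing file, as with klepto's other archive types):

from klepto.archives import hdf_archive

# cached=True (the default): updates live in memory until dumped
d = hdf_archive('memo.hdf5', {'a': 1, 'b': 2})
d['c'] = 3
d.dump()          # assumed cache interface: sync entries to memo.hdf5

# cached=False: a raw file-backed archive that reads straight from disk
raw = hdf_archive('memo.hdf5', cached=False)
print(raw['c'])   # -> 3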