How to use the lmdb.Environment function in lmdb

To help you get started, we’ve selected a few lmdb.Environment examples, drawn from the ways it is most commonly used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github dsindex / blog / make_lmdb.py View on Github external
# NOTE(review): fragment of a CLI script -- the enclosing main/def line and the
# remainder of the stdin-processing loop are outside this excerpt.
parser = OptionParser()
    parser.add_option("--verbose", action="store_const", const=1, dest="verbose", help="verbose mode")
    parser.add_option("-d", "--db", dest="dbpath",help="db path", metavar="DB")
    (options, args) = parser.parse_args()

    if options.verbose == 1 : VERBOSE = 1

    # The database path is mandatory; print usage and exit otherwise.
    db_path = options.dbpath
    if db_path == None :
        parser.print_help()
        sys.exit(1)

    startTime = time.time()

    # With max_dbs=0 the environment itself is the single, unnamed database.
    # The file must already exist (create=False) and OS locking is disabled.
    env = lmdb.Environment(db_path,map_size=24*(1023**3),subdir=False,readonly=False,create=False,max_dbs=0,lock=False)
    txn = lmdb.Transaction(env,db=None,write=True)

    # Read tab-separated "key<TAB>value" records from stdin until EOF.
    linecount = 0
    while 1 :
        try : line = sys.stdin.readline()
        except KeyboardInterrupt : break
        if not line : break  # EOF
        try : line = line.strip()
        except : continue
        if not line : continue  # skip blank lines
        linecount += 1
        # Progress heartbeat every 1000 input lines, written to stderr.
        if linecount % 1000 == 0 :
            sys.stderr.write("[linecount]" + "\t" + str(linecount) + "\n")

        # Split on the first tab only; the value may itself contain tabs.
        key,value = line.split('\t',1)
        if not key or not value : continue
github vertexproject / synapse / synapse / tools / migrate_010.py View on Github external
# NOTE(review): fragment of a migration class __init__; the def line and the
# parameters (core, tmpdir, stage1_fn, rejects_fh, good_forms, outfh, ...)
# are outside this excerpt.
self.core = core
        self.dbenv = None
        self.skip_stage1 = bool(stage1_fn)
        # Either a scratch directory or an existing stage-1 file is required.
        assert tmpdir or stage1_fn
        self.next_val = 0
        self.rejects_fh = rejects_fh
        # Restrict migration to these form names when provided; None = all forms.
        self.good_forms = set(good_forms) if good_forms is not None else None

        if stage1_fn is None:
            # Reserve a unique filename for the stage-1 scratch database.
            # NOTE(review): the temp file is deleted on context exit and only
            # its *name* is reused below, leaving a small window in which
            # another process could claim the same path -- confirm acceptable.
            with tempfile.NamedTemporaryFile(prefix='stage1_', suffix='.lmdb', delete=True, dir=str(tmpdir)) as fh:
                stage1_fn = fh.name
            logger.info('Creating stage 1 file at %s.  Delete when migration deemed successful.', stage1_fn)

        # Durability (metasync/sync/lock) is traded away for bulk-load speed;
        # the stage-1 file is disposable if the migration fails.
        map_size = s_lmdb.DEFAULT_MAP_SIZE
        self.dbenv = lmdb.Environment(stage1_fn,
                                      map_size=map_size,
                                      subdir=False,
                                      metasync=False,
                                      writemap=True,
                                      max_readers=1,
                                      max_dbs=4,
                                      sync=False,
                                      lock=False)
        # Named sub-databases used by the migration stages.
        self.iden_tbl = self.dbenv.open_db(key=b'idens', dupsort=True)  # iden -> row
        self.form_tbl = self.dbenv.open_db(key=b'forms', dupsort=True)  # formname -> iden
        self.comp_tbl = self.dbenv.open_db(key=b'comp')  # guid -> comp tuple
        self.valu_tbl = self.dbenv.open_db(key=b'vals', integerkey=True)
        self.outfh = outfh
        self._precalc_types()
        # Forms migrated first: file:bytes, then comp forms in reverse
        # declaration order.
        self.first_forms = ['file:bytes'] + [f for f in reversed(_comp_and_sepr_forms) if self.is_comp(f)]
github hyperledger / grid-contrib / validator / sawtooth_validator / database / lmdb_database.py View on Github external
Args:
            filename (str): The filename of the database file.
            flag (str): a flag indicating the mode for opening the database.
                Refer to the documentation for anydbm.open().
        """
        super(LMDBDatabase, self).__init__()
        # Serializes access to the environment from multiple threads.
        self._lock = RLock()

        # 'c': create the file if it does not already exist.
        create = bool(flag == 'c')

        # 'n': always start from an empty database, removing any old file.
        if flag == 'n':
            if os.path.isfile(filename):
                os.remove(filename)
            create = True

        # Single-file (subdir=False) environment; writemap trades crash
        # safety for write speed, and LMDB's OS-level locking is disabled.
        self._lmdb = lmdb.Environment(path=filename,
                                      map_size=1024**4,
                                      writemap=True,
                                      subdir=False,
                                      create=create,
                                      lock=False)
github SeanTater / uncc2014watsonsim / scripts / gensim / vstore.py View on Github external
def __init__(self, name_on_disk, db_name):
		"""Bind this VStore to a named sub-database, opening (or reusing)
		the process-wide cached LMDB environment for the given file."""
		try:
			env = self._allenvs[name_on_disk]
		except KeyError:
			env = lmdb.Environment(
				name_on_disk, map_size=100 << 30, max_dbs=100)
			self._allenvs[name_on_disk] = env
		self._env = env
		self._db = env.open_db(db_name)
github Megvii-CSG / MegReader / scripts / json_to_lmdb.py View on Github external
def main(json_path=None, lmdb_path=None):
    """Convert a meta.json-described image dataset into an LMDB database.

    NOTE(review): excerpt -- the loop body is cut off before the image bytes
    are written to ``db_image``; ``data_ids`` is never used in visible code.
    """
    assert json_path is not None, 'json_path is needed'
    if lmdb_path is None:
        lmdb_path = json_path

    meta = os.path.join(json_path, 'meta.json')
    data_ids = []
    # NOTE(review): ``value`` is reused across iterations; only the 'extra'
    # entry is reassigned each pass in the visible code.
    value = {}
    env = lmdb.Environment(lmdb_path, subdir=True,
                           map_size=int(1e9), max_dbs=2, lock=False)
    # Two named sub-databases: metadata and raw image bytes.
    db_extra = env.open_db('extra'.encode(), create=True)
    db_image = env.open_db('image'.encode(), create=True)
    with open(meta, 'r') as meta_reader:
        # Each line of meta.json is an independent JSON record.
        for line in tqdm(meta_reader):
            single_meta = json.loads(line)
            data_id = os.path.join(json_path, single_meta['filename'])
            data_id = str(data_id.encode('utf-8').decode('utf-8'))
            with open(data_id.encode(), 'rb') as file_reader:
                image = file_reader.read()
            value['extra'] = {}
            for key in single_meta['extra']:
                value['extra'][key] = single_meta['extra'][key]
            # One write transaction per record.
            with env.begin(write=True) as lmdb_writer:
                lmdb_writer.put(data_id.encode(),
                                pickle.dumps(value), db=db_extra)
github Megvii-CSG / MegReader / scripts / nori_to_lmdb.py View on Github external
def main(nori_path, lmdb_path=None):
    """Mirror a nori archive into an LMDB database.

    For every record, the metadata's 'extra' mapping is pickled into the
    'extra' sub-database and the raw image bytes go into 'image', both
    keyed by the encoded record id.  ``lmdb_path`` defaults to
    ``nori_path`` when not given.
    """
    if lmdb_path is None:
        lmdb_path = nori_path
    env = lmdb.Environment(lmdb_path, map_size=int(5e10),
                           writemap=True, max_dbs=2, lock=False)
    fetcher = nori.Fetcher(nori_path)
    extra_db = env.open_db('extra'.encode(), create=True)
    image_db = env.open_db('image'.encode(), create=True)
    # Single write transaction for the whole archive scan.
    with nori.open(nori_path, 'r') as archive, env.begin(write=True) as txn:
        for record_id, _payload, meta in tqdm(archive.scan()):
            image_bytes = fetcher.get(record_id)
            record = {'extra': {k: meta['extra'][k] for k in meta['extra']}}
            key = record_id.encode()
            txn.put(key, pickle.dumps(record), db=extra_db)
            txn.put(key, image_bytes, db=image_db)
    env.close()
    print('Finished')
github marsermd / DeepestScatter / DeepestScatter_Train / Datasets.py View on Github external
def __init__(self, databasePath, readonly=True):
        """Open the dataset environment at *databasePath* and register the
        scene, scatter-sample and result databases.  Databases are only
        created when opened for writing (``readonly=False``)."""
        writable = not readonly
        self.env = Environment(databasePath, subdir=False, max_dbs=64, mode=0, create=writable, readonly=readonly)
        self.descriptorToDb = {}
        self.nextIds = {}
        # One sub-database per record type, registered in a fixed order.
        for attr, descriptor in (('scenes_db', SceneSetup),
                                 ('scatter_db', ScatterSample),
                                 ('results_db', Result)):
            setattr(self, attr, self.__addDb(descriptor, create=writable))
github hyperledger / sawtooth-core / validator / sawtooth_validator / database / lmdb_nolock_database.py View on Github external
Args:
            filename (str): The filename of the database file.
            flag (str): a flag indicating the mode for opening the database.
                Refer to the documentation for anydbm.open().
        """
        super(LMDBNoLockDatabase, self).__init__()

        # 'c': create the file if it does not already exist.
        create = bool(flag == 'c')

        # 'n': always start from an empty database, removing any old file.
        if flag == 'n':
            if os.path.isfile(filename):
                os.remove(filename)
            create = True

        # Single-file environment with asynchronous memory-mapped writes.
        # NOTE(review): lock=True despite the class name -- "NoLock" appears
        # to refer to the absence of an application-level lock rather than
        # LMDB's; confirm against the sibling LMDBDatabase implementation.
        self._lmdb = lmdb.Environment(path=filename,
                                      map_size=1024**4,
                                      map_async=True,
                                      writemap=True,
                                      subdir=False,
                                      create=create,
                                      lock=True)
github oddjobz / pynndb / pynndb / database.py View on Github external
def __init__(self, name, conf=None, binlog=True, size=None, master=False):
        """Open (and lazily create) a pynndb database environment.

        NOTE(review): excerpt -- the ``try`` block is cut off before its
        ``except`` clause, and ``master`` is unused in the visible code.
        """
        # Overlay any caller-supplied 'env' settings on the class defaults.
        # NOTE(review): when ``conf`` is falsy this aliases the shared class
        # attribute ``self._conf``, so the ``map_size`` write below would
        # mutate it for every instance -- verify that is intended.
        conf = dict(self._conf, **conf.get('env', {})) if conf else self._conf
        if size: conf['map_size'] = size
        self._tables = {}
        self._semaphore = False
        self._name = name
        self._env = lmdb.Environment(name, **conf)
        self._db = self._env.open_db()
        self._binlog = None
        self._binidx = None
        # Node identity is persisted in the __metadata__ table; default 0.
        self._meta = self.table('__metadata__')
        doc = self._meta.get(b'__node__')
        self._node = doc.get('value') if doc else 0

        try:
            # Reset the binlog, then re-enable it when requested.
            self.set_binlog(enable=False)
            if binlog:
                self.set_binlog(enable=True)
                # try:
                #     self._semaphore = Semaphore(semaphore_path(name))
                # except ExistentialError:
                #     pass
                self._binlog = self._env.open_db(b'__binlog__', create=binlog)
github mitmul / ssai / scripts / create_dataset.py View on Github external
def create_patches(sat_patch_size, map_patch_size, stride, map_ch,
                   sat_data_dir, map_data_dir,
                   sat_out_dir, map_out_dir):
    # Python 2 code (note the print statement below).
    # NOTE(review): excerpt -- the function continues past this view;
    # map_ch is unused in the visible portion.
    # Start from empty output directories.
    if os.path.exists(sat_out_dir):
        shutil.rmtree(sat_out_dir)
    if os.path.exists(map_out_dir):
        shutil.rmtree(map_out_dir)
    os.makedirs(sat_out_dir)
    os.makedirs(map_out_dir)

    # db: one LMDB environment per modality, 1 TiB map_size each.
    sat_env = lmdb.Environment(sat_out_dir, map_size=1099511627776)
    sat_txn = sat_env.begin(write=True, buffers=True)
    map_env = lmdb.Environment(map_out_dir, map_size=1099511627776)
    map_txn = map_env.begin(write=True, buffers=True)

    # patch size
    sat_size = sat_patch_size
    map_size = map_patch_size
    print 'patch size:', sat_size, map_size, stride

    # get filenames; shuffling both arrays with the same permutation keeps
    # satellite/map image pairs aligned.
    sat_fns = np.asarray(sorted(glob.glob('%s/*.tif*' % sat_data_dir)))
    map_fns = np.asarray(sorted(glob.glob('%s/*.tif*' % map_data_dir)))
    index = np.arange(len(sat_fns))
    np.random.shuffle(index)
    sat_fns = sat_fns[index]
    map_fns = map_fns[index]

    # create keys